import ast
import base64
import codecs
import inspect
import io
import json
import logging
import os
import re
import subprocess
import tempfile
import threading
import time
import traceback
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from json import JSONDecodeError
from pipes import quote
from threading import Thread

import openpyxl
import pandas as pd
import requests
from flask import Flask, render_template, request, Response, stream_with_context, jsonify

import job
import one_runtime
import script
import util
from job import JobCtx
from script import task_runner, common_client, pe_copilot, tool_client
from script.pe_copilot import ReactReq, CopilotReq
from script.task_runner import NodeTask, EvalTask
from script.common_storage import node_data_storage, ScriptNodeData, call_record_storage, odps_storage, eval_data_storage, plan_storage, ScriptNodePlan, embed_storage, EmbedData

app = Flask(__name__)
# Emit non-ASCII (e.g. Chinese) characters verbatim in JSON responses
# instead of \uXXXX escapes.
app.config['JSON_AS_ASCII'] = False
app.logger.setLevel(logging.DEBUG)

# Disabled custom handler that would forward Flask log records to util.info.
# class CustomHandler(logging.Handler):
#     def emit(self, record):
#         # get the log message
#         msg = self.format(record)
#         # record it via the project logging helper
#         util.info('web', msg)
#
#
# # log forwarding
# handler = CustomHandler()
# app.logger.addHandler(handler)

# Counter handed out by /set_body as a lookup key.
# NOTE(review): incremented without a lock — concurrent POSTs may race; confirm
# whether the deployment is single-worker before relying on uniqueness.
p_index = 0
# Maps str(p_index) -> raw request body stored by /set_body and popped by the
# SSE endpoints (/chat, /re_test, /prompt_copilot, /react_ops, /chat_b).
body_map = {}


@app.route('/')
def index():
    """Render the landing page."""
    page = 'index.html'
    return render_template(page)


@app.route('/test')
def test():
    """Render the test page."""
    page = 'test.html'
    return render_template(page)


@app.route('/link')
def link():
    """Render the horse up/down classification page."""
    page = 'horse_up_down_class.html'
    return render_template(page)


@app.route('/dagre')
def dagre():
    """Render the dagre graph-result page."""
    page = 'draw_dagre_result.html'
    return render_template(page)


@app.route('/promptb')
def promptb():
    """Render the prompt-engineering workbench page."""
    page = 'prompt_engineer.html'
    return render_template(page)


@app.route('/prompts')
def prompts():
    """Render the prompt-script page; gated behind the ENABLE_PROMPTS flag."""
    enabled = one_runtime.get_config("ENABLE_PROMPTS", "false") == 'true'
    if not enabled:
        raise ValueError('please enable ENABLE_PROMPTS=true')
    return render_template('prompt_script.html')


@app.route('/embed')
def embed_hub():
    """Render the embedding hub page."""
    page = 'embed_hub.html'
    return render_template(page)


@app.route('/set_body', methods=['POST'])
def set_body():
    """Stash the posted 'body' form field under a fresh numeric key and return the key."""
    global p_index
    p_index += 1
    key = str(p_index)
    body_map[key] = request.form.get('body', '')
    return key


@app.route('/chat')
def chat():
    """SSE endpoint: stream model output for a body previously stored via /set_body."""
    key = request.args.get('p_index', '')
    prompt = body_map.pop(key)
    model = request.args.get('model', '')
    print(model, key, prompt)
    # stream_with_context keeps the request context alive for the generator;
    # text/event-stream is the SSE mime type.
    resp = Response(stream_with_context(event_stream(model, prompt)), mimetype="text/event-stream")
    resp.headers["Cache-Control"] = "no-cache"
    resp.headers["X-Accel-Buffering"] = "no"
    return resp



@app.route('/re_test')
def re_test():
    """SSE endpoint: re-run a stored test request (see re_test_stream)."""
    key = request.args.get('p_index', '')
    body = json.loads(body_map.pop(key))
    stream = re_test_stream(body.get('test_point'), body.get('test_param'), body.get('detail_code'))
    # stream_with_context keeps the request context alive for the generator;
    # text/event-stream is the SSE mime type.
    resp = Response(stream_with_context(stream), mimetype="text/event-stream")
    resp.headers["Cache-Control"] = "no-cache"
    resp.headers["X-Accel-Buffering"] = "no"
    return resp


def re_test_stream(test_point, test_param, detail_code):
    """SSE generator that dynamically loads ``detail_code`` and executes the
    function named ``re_test_<test_point>`` with ``test_param``.

    Yields SSE ``data:`` frames: intermediate frames while the test function
    is a generator, then a final frame with ``finished=True`` (``fail=True``
    plus a message on validation or execution errors).
    """
    test_id = str(time.time())
    # Tag the thread-local log context so downstream logs carry the test id.
    util.log_local.run_data = {
        "test_id": test_id
    }
    if not test_point or not test_param or not detail_code or not isinstance(test_param, dict):
        yield "data:" + json.dumps({
            'finished': True,
            'fail': True,
            'test_id': test_id,
            'message': 'test_point or test_param or detail_code can not be null'
        }, ensure_ascii=False) + "\n\n"
        # Fix: stop here instead of falling through and invoking the loader
        # with invalid arguments (the original continued into the try block).
        if hasattr(util.log_local, "run_data"):
            del util.log_local.run_data
        return

    try:
        test_fun = parse_dynamic_load_code(detail_code, 're_test_' + test_point, [test_param])
        data = test_fun(test_param)
        last = None
        # A generator test function streams intermediate results; a plain
        # function returns the final result directly.
        if inspect.isgenerator(data):
            for data_i in data:
                if data_i is None:
                    continue
                last = data_i
                yield "data:" + json.dumps({
                    'finished': False,
                    'fail': False,
                    'test_id': test_id,
                    'data': data_i
                }, ensure_ascii=False) + "\n\n"
        else:
            last = data

        yield "data:" + json.dumps({
            'finished': True,
            'fail': False,
            'test_id': test_id,
            'data': last
        }, ensure_ascii=False) + "\n\n"

    except Exception as e:
        yield "data:" + json.dumps({
            'finished': True,
            'fail': True,
            'test_id': test_id,
            'message': str(e)
        }, ensure_ascii=False) + "\n\n"
    finally:
        # Clean up the thread-local so the context does not leak into the
        # next request handled by this worker thread.
        if hasattr(util.log_local, "run_data"):
            del util.log_local.run_data




@app.route('/prompt_copilot')
def prompt_copilot():
    """SSE endpoint driving the prompt-engineering copilot."""
    key = request.args.get('p_index', '')
    prompt = body_map.pop(key)
    model = request.args.get('model', '')
    print(model, key, prompt)
    payload = json.loads(prompt)
    req = CopilotReq(model_params=request.args.to_dict(), messages=payload.get('messages'), action=payload.get('action'), ctx={})

    def as_sse(chunks):
        # Wrap each copilot chunk in an SSE "data:" frame.
        for chunk in chunks:
            yield f"data: {json.dumps(chunk, ensure_ascii=False)}\n\n"

    # stream_with_context keeps the request context alive while streaming.
    resp = Response(stream_with_context(as_sse(pe_copilot.copilot_predict(req))), mimetype="text/event-stream")
    resp.headers["Cache-Control"] = "no-cache"
    resp.headers["X-Accel-Buffering"] = "no"
    return resp


@app.route('/react_ops')
def react_ops():
    """SSE endpoint running a ReAct tool-use loop and streaming progress frames."""
    key = request.args.get('p_index', '')
    prompt = body_map.pop(key)
    model = request.args.get('model', '')
    print(model, key, prompt)
    payload = json.loads(prompt)
    model_params = request.args.to_dict()
    # Pull 'tools' out of the model params (only when present and truthy).
    tools_str = model_params.pop('tools') if model_params.get('tools') else ''
    tools = json.loads(tools_str) if tools_str else []

    def take(name):
        # Remove an optional prompt-template override from model_params,
        # returning None when it is absent or falsy.
        return model_params.pop(name) if model_params.get(name) else None

    params = {
        'sys_pre': take('sys_pre'),
        'sys_react_pre': take('sys_react_pre'),
        'sys_react_after': take('sys_react_after'),
        'user_react_pre': take('user_react_pre')
    }
    begin_time = time.time()
    react_id = str(begin_time)
    model_params['self_key'] = True
    req = ReactReq(model_params=model_params, tools=tools, messages=payload.get('messages'), params=params, react_id=react_id)

    def as_sse(states):
        # Project each ReAct state into a compact SSE progress frame.
        for state in states:
            frame = {
                'msg_len': len(state.messages),
                'cost': round(time.time() - begin_time, 1),
                'react_id': react_id,
                'money': state.ext_info.get('money') if state.ext_info else None,
                'finished': state.finished,
                'fail': state.status is not True,
                'message': state.message,
                'content': state.result,
                'thought': state.thought
            }
            yield f"data: {json.dumps(frame, ensure_ascii=False, default=str)}\n\n"

    resp = Response(stream_with_context(as_sse(tool_client.react_ops(req))), mimetype="text/event-stream")
    resp.headers["Cache-Control"] = "no-cache"
    resp.headers["X-Accel-Buffering"] = "no"
    return resp


def event_stream(model, prompt):
    """SSE generator for /chat: stream completions from the selected backend.

    Supported models:
      * ``gpt-<name>``  -- OpenAI-compatible streaming /v1/chat/completions
        (``domain``/``sk`` taken from query args).
      * ``dashscope``   -- Aliyun DashScope text-generation SSE API
        (``ak``/``model_name`` taken from query args).

    Yields SSE ``data:`` frames whose ``out`` field accumulates the generated
    text; the last frame carries ``finished=True`` (or ``fail=True`` on error).
    """
    if not prompt or not model:
        yield "data:" + json.dumps({
            'finished': True,
            'fail': True,
            'message': 'prompt or model can not be null'
        }, ensure_ascii=False) + "\n\n"
        # Fix: return after reporting the validation failure instead of
        # falling through into the request logic (which then emitted a
        # second, spurious error frame).
        return
    try:
        # OpenAI-compatible backend.
        if model.startswith('gpt-'):
            messages = None
            try:
                safe_len = 16000
                messages = json.loads(prompt)
                # Pull the system message out so history trimming keeps it.
                system_message = messages[0]
                # Fix: the original check was inverted ('== system'), which
                # discarded a real system message and stripped a user turn
                # when the first message was not a system one. The dashscope
                # branch below always had the correct '!=' comparison.
                if not system_message or system_message.get('role') != 'system':
                    system_message = None
                else:
                    messages = messages[1:]

                if system_message:
                    safe_len -= len(system_message.get('content', ''))

                # Keep at most 5 history pairs plus the current turn.
                messages = messages[-11:]
                messages = common_client.trim_messages(messages, safe_len)
                if system_message:
                    messages.insert(0, system_message)

            except Exception:
                # Prompt is not a JSON message list: send it as raw user text.
                messages = [{
                    "role": "user",
                    "content": prompt
                }]
            domain = request.args.get('domain') if request.args.get('domain') else 'https://api.openai.com'
            sk = request.args.get('sk')
            if not sk:
                raise ValueError('sk 不可为空')
            payload_obj = {
                'model': model[4:],
                "messages": messages,
                'stream': True
            }
            headers = {
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + sk
            }
            url = domain + "/v1/chat/completions"
            start_time = time.time()

            res = requests.request("POST", url, headers=headers, stream=True, data=json.dumps(payload_obj),
                                   timeout=300)
            decoder = codecs.iterdecode(res.iter_lines(), 'utf-8')
            content = {
                'out': '',
                'msg_len': len(messages)
            }
            for line in decoder:
                print('收到openai响应：' + line)
                if not line:
                    continue
                # A complete JSON document on one line means the server sent a
                # non-streaming (error) response.
                try:
                    json.loads(line)
                    raise ValueError(f'请求失败,非流式响应,{line}')
                except JSONDecodeError:
                    pass
                if not line.startswith('data:'):
                    continue
                content['cost'] = round(time.time() - start_time, 1)

                if line[5:].strip() == '[DONE]':
                    content['finished'] = True
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                    break
                cur_content = json.loads(line[5:])
                request_id = cur_content.get('id')
                content['request_id'] = request_id
                output = cur_content.get('choices')
                if not output or 'delta' not in output[0]:
                    raise ValueError('获取output失败,' + str(cur_content))
                output = output[0]
                finish_reason = output.get('finish_reason') if output.get(
                    'finish_reason') is not None and output.get('finish_reason') != 'null' else None

                if finish_reason:
                    content['finished'] = True
                    content['finish_reason'] = finish_reason
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                    break
                elif output.get('delta').get('content') is not None:
                    content['out'] = content['out'] + output.get('delta').get('content')
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
        elif model == 'dashscope':
            ak = request.args.get('ak', None, str)
            if not ak:
                raise ValueError('ak 不可为空')
            model_name = request.args.get('model_name', None, str)
            if not model_name:
                raise ValueError('model_name 不可为空')
            # Non-GPT backend: assemble the DashScope payload by hand.
            messages = json.loads(prompt)
            # Pull the system message out so history trimming keeps it.
            system_message = messages[0]
            if not system_message or system_message.get('role') != 'system':
                system_message = None
            else:
                messages = messages[1:]
            # Keep at most 5 history pairs plus the current turn.
            messages = messages[-11:]
            if system_message:
                messages.insert(0, system_message)
            url = "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"
            payload_obj = {
                "model": model_name,
                "input": {},
                "parameters": {}
            }
            temperature = request.args.get('temperature', None, float)
            if temperature is not None:
                payload_obj['parameters']['temperature'] = temperature
            top_k = request.args.get('top_k', None, int)
            if top_k is not None:
                payload_obj['parameters']['top_k'] = top_k
            max_tokens = request.args.get('max_tokens', None, int)
            if max_tokens is not None:
                payload_obj['parameters']['max_tokens'] = max_tokens
            payload_obj['input']['messages'] = messages
            stop = request.args.get('stop')
            if stop:
                payload_obj['parameters']['stop'] = json.loads(stop)

            payload = json.dumps(payload_obj)
            headers = {
                'Content-Type': 'application/json',
                'X-DashScope-SSE': 'enable',
                'Authorization': 'Bearer ' + ak
            }
            content = {
                'out': '',
                'msg_len': len(messages)
            }
            start_time = time.time()
            response = requests.request("POST", url, stream=True, timeout=300, headers=headers, data=payload)
            if response.status_code != 200:
                raise ValueError(f"请求失败,{str(response.content)}")
            decoder = codecs.iterdecode(response.iter_lines(), 'utf-8')
            for line in decoder:
                if not line:
                    continue
                print('收到dashscope响应：' + line)
                # A complete JSON document on one line means the server sent a
                # non-streaming (error) response.
                try:
                    json.loads(line)
                    raise ValueError(f'请求失败,非流式响应,{line}')
                except JSONDecodeError:
                    pass
                if not line.startswith('data:'):
                    continue
                res = json.loads(line[5:])
                output = res.get('output')
                if not output:
                    raise ValueError(f'解析output失败,{line}')
                # (Removed the original duplicate "output is None" check here:
                # it was unreachable after the guard above.)
                finish_reason = output.get('finish_reason') if output.get('finish_reason') is not None and output.get('finish_reason') != 'null' else None
                message = {
                    'role': 'assistant',
                    'content': output.get('text')
                }
                if finish_reason:
                    # DashScope returns the full text each frame, not a delta.
                    if message.get('content'):
                        content['out'] = message.get('content') if message.get('content') else content['out']
                    content['finish_reason'] = finish_reason
                    usage = res.get('usage')
                    if usage and isinstance(usage, dict):
                        content['promptTokens'] = usage.get('input_tokens')
                        content['completionTokens'] = usage.get('output_tokens')
                    content['finished'] = True
                    content['cost'] = round(time.time() - start_time, 1)
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                    break
                else:
                    if message.get('content'):
                        content['out'] = message.get('content') if message.get('content') else content['out']
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
        else:
            raise ValueError("un support model:" + model)
    except Exception as error:
        print('event_stream error :' + str(error))
        yield "data:" + json.dumps({
            'finished': True,
            'fail': True,
            'message': str(error)
        }, ensure_ascii=False) + "\n\n"


@app.route('/upload_excel_pe_scheme', methods=['POST'])
def upload_excel_pe_scheme():
    """Parse an uploaded Excel file into JSON rows for the PE-scheme UI.

    Expects the upload under form field ``excel_file``. Reads the active
    sheet: row 1 as headers, then at most 2000 data rows. Rows carrying a
    ``prompt_scheme`` JSON column are flattened: its ``args`` dict is merged
    into the row and its ``history`` stored under ``history_``.
    """
    if 'excel_file' not in request.files:
        return "No file found"

    file = request.files['excel_file']

    def parse_excel(file_name):
        # Headers from row 1, data rows from row 2 onward.
        wb = openpyxl.load_workbook(file_name)
        sheet = wb.active
        headers = [cell.value for cell in sheet[1]]
        rows = []
        for row_num, row in enumerate(sheet.iter_rows(min_row=2, values_only=True), 2):
            # Cap at 2000 data rows (sheet rows 2..2001).
            if row_num > 2001:
                break
            rows.append(dict(zip(headers, row)))
        return headers, rows

    # delete=False so openpyxl can reopen the saved file by name (needed on
    # Windows); fix: explicitly remove it afterwards — the original leaked
    # one temp file per upload.
    temp_path = None
    try:
        with tempfile.NamedTemporaryFile(suffix='.xlsx', delete=False) as temp_file:
            temp_path = temp_file.name
            file.save(temp_path)
        headers, rows = parse_excel(temp_path)
    finally:
        if temp_path and os.path.exists(temp_path):
            os.remove(temp_path)

    if rows:
        for row in rows:
            for key in row:
                # Normalize NaN/NaT/None-ish cells so json.dumps emits null.
                # (The original's extra 'elif ... continue' branch was a no-op
                # and has been removed.)
                if pd.isnull(row[key]) or pd.isna(row[key]):
                    row[key] = None
            pe_scheme = row.get('prompt_scheme')
            if pe_scheme:
                pe_scheme_obj = json.loads(pe_scheme)
                args = pe_scheme_obj.get('args')
                # Skip malformed schemes whose args is not an object.
                if not isinstance(args, dict):
                    continue
                del row['prompt_scheme']
                row.update(args)
                row['history_'] = pe_scheme_obj.get('history')

    return json.dumps(rows, ensure_ascii=False, default=str)


@app.route('/dynamic_load', methods=['POST'])
def dynamic_load():
    """Compile user-supplied code and invoke the named function with the given args."""
    req = request.get_json()
    code = req.get('code')
    func = req.get('func')
    args = req.get('args')
    if not code or not func or not args:
        return json.dumps({
            'success': False,
            'message': 'code or func or args不可为空'
        }, ensure_ascii=False)

    try:
        loader = parse_dynamic_load_code(code, func, args)
        data = loader(*args)
        # Only dict/list results are allowed back to the caller.
        if data and not isinstance(data, (dict, list)):
            raise ValueError('返回值必须是字典或列表')
        return json.dumps({
            'success': True,
            'data': data if data else None
        }, ensure_ascii=False, default=str)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False)


@app.route('/read_odps_pe_scheme', methods=['POST'])
def read_odps_pe_scheme():
    """Load PE-scheme examples from ODPS via a user-supplied SQL query.

    Each result row is normalized to ``{'args': {...}, 'history_': [...]}``:
    rows with a ``prompt_scheme`` JSON column take args/history from it;
    other rows use their columns directly, with ``history_`` split out of
    the args.
    """
    sql = request.form.get('sql')
    if not sql:
        return json.dumps({
            'success': False,
            'message': 'sql 不可为空'
        }, ensure_ascii=False)
    rows = odps_storage.read(sql)
    new_rows = []
    for row in rows:
        pe_scheme = row.get('prompt_scheme')
        if pe_scheme:
            pe_scheme_obj = json.loads(pe_scheme)
            args = pe_scheme_obj.get('args')
            # Skip malformed schemes whose args is not an object.
            if not isinstance(args, dict):
                continue
            del row['prompt_scheme']
            row.update(args)
            if 'history_' in row:
                del row['history_']
            new_rows.append({
                'args': row,
                'history_': pe_scheme_obj.get('history')
            })
        else:
            # Fix: the original checked for 'history_' but deleted
            # row['prompt_scheme'] — raising KeyError when that column is
            # absent and leaving history_ duplicated inside args. Strip both
            # meta columns defensively before using the row as args.
            row.pop('prompt_scheme', None)
            history_ = row.pop('history_', None)
            new_rows.append({
                'args': row,
                'history_': history_
            })

    return json.dumps({
        "success": True,
        'data': {
            'sql': sql,
            'examples': new_rows
        }
    }, ensure_ascii=False, default=str)


@app.route('/export_excel', methods=['POST'])
def export_excel():
    """Export posted ``{headers, rows}`` JSON as an .xlsx file download.

    Nested dicts/lists are serialized to JSON strings, booleans lowered to
    'true'/'false', and numbers stringified so Excel does not re-render
    large values in scientific notation.
    """
    data = request.get_json()
    headers = data['headers']
    rows = data['rows']
    # Flatten cell values into plain strings.
    for item in rows:
        for key, value in item.items():
            if isinstance(value, (dict, list)):
                item[key] = json.dumps(value, ensure_ascii=False, default=str)
            elif isinstance(value, bool):
                item[key] = str(value).lower()
            elif isinstance(value, (float, int)):
                item[key] = "" if pd.isna(value) else str(value)

    df = pd.DataFrame(rows, columns=headers)

    for header in headers:
        # Force string dtype so every column keeps its literal form.
        df[header] = df[header].fillna('').astype(str)

    # Write the DataFrame to an in-memory xlsx buffer.
    output = io.BytesIO()
    with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
        df.to_excel(writer, index=False)
    # Fix: the original base64-encoded the buffer and immediately decoded it
    # again before responding — use the raw bytes directly.
    excel_bytes = output.getvalue()
    output.close()

    # NOTE(review): `quote` here is shell quoting (pipes.quote), not HTTP
    # header escaping. It is a no-op for this fixed ASCII name, but the
    # `pipes` module is removed in Python 3.13 — consider urllib.parse.quote.
    filename = 'exported_data.xlsx'
    content_disposition = f'attachment; filename={quote(filename)}'

    return Response(excel_bytes, content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', headers={
        'Content-Disposition': content_disposition
    })


@app.route('/chat_b')
def chat_b():
    """SSE endpoint for batch prompt-plan execution (see event_stream_b)."""
    key = request.args.get('p_index', '')
    req_body = body_map.pop(key)
    print(key, req_body)
    # stream_with_context keeps the request context alive for the generator;
    # text/event-stream is the SSE mime type.
    resp = Response(stream_with_context(event_stream_b(req_body)), mimetype="text/event-stream")
    resp.headers["Cache-Control"] = "no-cache"
    resp.headers["X-Accel-Buffering"] = "no"
    return resp


def event_stream_b(req_body):
    if not req_body:
        yield "data:" + json.dumps({
            'all_finished': True,
            'fail': True,
            'message': 'req_body can not be null'
        }, ensure_ascii=False) + "\n\n"
        return
    body = json.loads(req_body)
    plans = body.get('plans')
    if not plans:
        yield "data:" + json.dumps({
            'all_finished': True,
            'fail': True,
            'message': 'plans can not be null'
        }, ensure_ascii=False) + "\n\n"
        return
    rows = body.get('rows')
    if not rows:
        yield "data:" + json.dumps({
            'all_finished': True,
            'fail': True,
            'message': 'rows can not be null'
        }, ensure_ascii=False) + "\n\n"
        return

    plan_name = None
    id = None
    try:
        for r_index, row in enumerate(rows):
            for p_index, plan in enumerate(plans):
                id = row['id']
                messages = [item.copy() for item in plan.get('messages')]
                model = plan.get('model')
                plan_name = plan.get('plan_name')
                params = plan.get('params')
                stop = json.loads(params.get('stop')) if params.get('stop') else None
                tools = plan.get('tools')
                pre_func = parse_prompt_pre_code(plan.get('pre_code'))
                convert_func = parse_prompt_convert_code(plan.get('convert_code'))
                content = {
                    'out': '',
                    'tools': tools,
                    'out':''
                }
                row.update({
                    'plan_name': plan_name
                })
                try:
                    content.update(row)
                    if pre_func:
                        pre_ret = pre_func(row, messages)
                        row['pre_ret'] = pre_ret
                        content['pre_ret'] = pre_ret
                    # 如果列中有一列叫history_那么将之当做历史信息,history也是个list，将之append到messages的第二个开始的位置
                    history_ = row.get('history_')
                    if isinstance(history_, str):
                        history_ = json.loads(history_)
                    if isinstance(history_, list):
                        history_.reverse()
                        for message in history_:
                            message['role'] = 'user' if message['role'] != 'assistant' else 'assistant'
                            messages.insert(1 if messages and messages[0].get('role') == 'system' else 0, message)

                    real_messages = util.build_messages(messages, row)
                    if not real_messages:
                        raise ValueError('messages不可为空')
                    if tools:
                        tools_name, tools_desc = util.parse_llm_tools(tools)
                        # 剔除第一条系统消息
                        if real_messages and real_messages[0].get('role') == 'system':
                            raise ValueError('工具模式不支持设置系统指令')
                        util.merge_react_messages(messages=real_messages, params={**params, **row}, tools_name=tools_name, tools_desc=tools_desc, observation='', support_assistant_predict='gpt' in model)

                    row.update({
                        'messages': real_messages
                    })
                    print(f'处理行, plan_name:{plan_name}, messages:{real_messages}')
                    content.update(row)
                    if not real_messages:
                        raise ValueError('prompt为空')
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                    # 官方gpt
                    if model.startswith('gpt-'):
                        domain = params.get('domain') if params.get('domain') else 'https://api.openai.com'
                        sk = params.get('sk')
                        if not sk:
                            raise ValueError('sk 不可为空')
                        payload_obj = {
                            'model': model[4:],
                            "messages": real_messages,
                            "stop":stop,
                            'stream':True
                        }
                        if params.get('temperature'):
                            payload_obj['temperature'] = float(params.get('temperature'))
                        if params.get('top_p'):
                            payload_obj['top_p'] = float(params.get('top_p'))
                        if params.get('top_k'):
                            payload_obj['top_k'] = int(params.get('top_k'))
                        if params.get('max_tokens'):
                            payload_obj['max_tokens'] = int(params.get('max_tokens'))
                        headers = {
                            'Content-Type': 'application/json',
                            'Authorization': 'Bearer ' + sk
                        }
                        url = domain+"/v1/chat/completions"
                        start_time = time.time()

                        res = requests.request("POST", url, headers=headers, stream=True, data=json.dumps(payload_obj),
                                               timeout=300)
                        decoder = codecs.iterdecode(res.iter_lines(), 'utf-8')
                        for line in decoder:
                            print('收到openai响应：' + line)
                            if not line:
                                continue
                            try:
                                json.loads(line)
                                raise ValueError(f'请求失败,非流式响应,{line}')
                            except JSONDecodeError:
                                pass
                            if not line.startswith('data:'):
                                continue
                            content['cost'] = round(time.time() - start_time, 1)

                            if line[5:].strip() == '[DONE]':
                                content['finished'] = True
                                content['finish_reason'] = 'stop'
                                if convert_func:
                                    try:
                                        content['convert_ret'] = convert_func(content['out'], True)
                                    except Exception as e:
                                        content['convert_ret'] = '__error__' + str(e)
                                if r_index == len(rows) - 1 and p_index == len(plans) - 1:
                                    content['all_finished'] = True
                                yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                            cur_content = json.loads(line[5:])
                            request_id = cur_content.get('id')
                            content['request_id'] = request_id
                            output = cur_content.get('choices')
                            if not output or 'delta' not in output[0]:
                                raise ValueError('获取output失败,' + str(cur_content))
                            output = output[0]
                            finish_reason = output.get('finish_reason') if output.get(
                                'finish_reason') is not None and output.get('finish_reason') != 'null' else None

                            if finish_reason:
                                content['finished'] = True
                                content['finish_reason'] = finish_reason
                                if convert_func:
                                    try:
                                        content['convert_ret'] = convert_func(content['out'], True)
                                    except Exception as e:
                                        content['convert_ret'] = '__error__' + str(e)
                                if r_index == len(rows) - 1 and p_index == len(plans) - 1:
                                    content['all_finished'] = True
                                yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                                break
                            elif output.get('delta').get('content') is not None:
                                content['out'] = content['out'] + output.get('delta').get('content')
                                if convert_func:
                                    try:
                                        content['convert_ret'] = convert_func(content['out'], False)
                                    except Exception as e:
                                        content['convert_ret'] = '__error__' + str(e)
                                yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"

                    elif model == 'dashscope':
                        sk = params.get('ak')
                        if not sk:
                            raise ValueError('ak 不可为空')
                        model_name = params.get('model_name')
                        if not model_name:
                            raise ValueError('model_name 不可为空')
                        url = "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"
                        payload_obj = {
                            "model": model_name,
                            "input": {},
                            "parameters": {}
                        }
                        temperature = params.get('temperature', None)
                        if temperature is not None:
                            payload_obj['parameters']['temperature'] = float(temperature)
                        top_k = params.get('top_k', None)
                        if top_k is not None:
                            payload_obj['parameters']['top_k'] = int(top_k)
                        max_tokens = params.get('max_tokens', None)
                        if max_tokens is not None:
                            payload_obj['parameters']['max_tokens'] = int(max_tokens)
                        payload_obj['input']['messages'] = real_messages
                        if stop:
                            payload_obj['parameters']['stop'] = stop

                        payload = json.dumps(payload_obj)
                        headers = {
                            'Content-Type': 'application/json',
                            'X-DashScope-SSE': 'enable',
                            'Authorization': 'Bearer ' + sk
                        }
                        start_time = time.time()
                        response = requests.request("POST", url, stream=True, timeout=300, headers=headers, data=payload)
                        if response.status_code != 200:
                            raise ValueError(f"请求失败,{str(response.content)}")
                        decoder = codecs.iterdecode(response.iter_lines(), 'utf-8')
                        for line in decoder:
                            if not line:
                                continue
                            print('收到dashscope响应：' + line)
                            try:
                                json.loads(line)
                                raise ValueError(f'请求失败,非流式响应,{line}')
                            except JSONDecodeError:
                                pass
                            if not line.startswith('data:'):
                                continue
                            output = res.get('output')
                            if not output:
                                raise ValueError(f'解析output失败,{line}')
                            finish_reason = output.get('finish_reason') if output.get('finish_reason') is not None and output.get('finish_reason') != 'null' else None
                            if output is None:
                                raise ValueError(f'解析output失败,{line}')
                            message = {
                                'role': 'assistant',
                                'content': output.get('text')
                            }
                            if finish_reason:
                                if message.get('content'):
                                    content['out'] = message.get('content') if message.get('content') else content['out']
                                if convert_func:
                                    try:
                                        content['convert_ret'] = convert_func(content['out'], True)
                                    except Exception as e:
                                        content['convert_ret'] = '__error__' + str(e)
                                content['finish_reason'] = finish_reason
                                usage = res.get('usage')
                                if usage and isinstance(usage, dict):
                                    content['promptTokens'] = usage.get('input_tokens')
                                    content['completionTokens'] = usage.get('output_tokens')
                                content['finished'] = True
                                content['cost'] = round(time.time() - start_time, 1)
                                if r_index == len(rows) - 1 and p_index == len(plans) - 1:
                                    content['all_finished'] = True
                                yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                                break
                            else:
                                if message.get('content'):
                                    content['out'] = message.get('content') if message.get('content') else content['out']
                                if convert_func:
                                    try:
                                        content['convert_ret'] = convert_func(content['out'], False)
                                    except Exception as e:
                                        content['convert_ret'] = '__error__' + str(e)
                                yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
                    else:
                        raise ValueError("un support model:" + model)
                except Exception as e1:
                    content['finished'] = True
                    content['plan_fail'] = True
                    content['plan_message'] = str(e1)
                    if r_index == len(rows) - 1 and p_index == len(plans) - 1:
                        content['all_finished'] = True
                    yield "data:" + json.dumps(content, ensure_ascii=False) + "\n\n"
    except Exception as error:
        print('event_stream_b error :' + str(error))
        yield "data:" + json.dumps({
            'all_finished': True,
            'finished': True,
            'plan_fail': True,
            'plan_message': str(error),
            'fail': True,
            'id': id,
            'plan_name': plan_name,
            'message': str(error)
        }, ensure_ascii=False) + "\n\n"
        return


def parse_prompt_convert_code(code):
    """Compile user-supplied code and return its top-level ``convert`` function.

    The code must define ``def convert(arg1, arg2)`` at module level.

    :param code: Python source text; falsy input returns ``None``.
    :return: the ``convert`` callable, or ``None`` when *code* is empty.
    :raises ValueError: no usable top-level ``convert`` with exactly 2 args.
    :raises SyntaxError: *code* is not valid Python.

    WARNING: this executes arbitrary code — only trusted input may be passed.
    """
    if not code:
        return None

    class CodeParser(ast.NodeVisitor):
        def __init__(self):
            self.convert_exists = False

        # Only top-level defs count: generic_visit is deliberately not called,
        # so nested functions are not inspected.
        def visit_FunctionDef(self, node):
            if node.name == 'convert' and len(node.args.args) == 2:
                self.convert_exists = True

        def parse(self, code):
            self.visit(ast.parse(code))

    parser = CodeParser()
    parser.parse(code)
    if not parser.convert_exists:
        raise ValueError("Function convert with 2 arguments not found in the code")
    # Execute in an isolated namespace seeded with the module globals so the
    # snippet can still use imported helpers, instead of exec'ing straight into
    # globals() where user code could clobber module-level names (json, re, ...).
    namespace = dict(globals())
    exec(code, namespace)
    convert_func = namespace.get('convert')
    if not callable(convert_func):
        # e.g. `convert` only existed as a class method, or was re-bound.
        raise ValueError("Function convert with 2 arguments not found in the code")
    return convert_func


def parse_dynamic_load_code(code, func, args):
    """Compile user-supplied code and return its top-level function ``func``.

    Generalized variant of :func:`parse_prompt_convert_code`: the expected
    function name and its arity come from the caller.

    :param code: Python source text; falsy input returns ``None``.
    :param func: name the source must define at module level.
    :param args: expected argument list; only ``len(args)`` is checked.
    :return: the named callable, or ``None`` when *code* is empty.
    :raises ValueError: no usable top-level ``func`` with ``len(args)`` args.
    :raises SyntaxError: *code* is not valid Python.

    WARNING: this executes arbitrary code — only trusted input may be passed.
    """
    if not code:
        return None

    class CodeParser(ast.NodeVisitor):
        def __init__(self):
            self.convert_exists = False

        # Only top-level defs count: nested functions are not inspected.
        def visit_FunctionDef(self, node):
            if node.name == func and len(node.args.args) == len(args):
                self.convert_exists = True

        def parse(self, code):
            self.visit(ast.parse(code))

    parser = CodeParser()
    parser.parse(code)
    if not parser.convert_exists:
        raise ValueError(f"Function {func} with {len(args)} arguments not found in the code")
    # Isolated namespace (seeded with module globals) instead of exec'ing into
    # globals(), which would let user code clobber module-level names.
    namespace = dict(globals())
    exec(code, namespace)
    loaded = namespace.get(func)
    if not callable(loaded):
        raise ValueError(f"Function {func} with {len(args)} arguments not found in the code")
    return loaded




def parse_prompt_pre_code(code):
    """Compile user-supplied code and return its top-level ``pre`` function.

    The code must define ``def pre(arg1, arg2)`` at module level.

    :param code: Python source text; falsy input returns ``None``.
    :return: the ``pre`` callable, or ``None`` when *code* is empty.
    :raises ValueError: no usable top-level ``pre`` with exactly 2 args.
    :raises SyntaxError: *code* is not valid Python.

    WARNING: this executes arbitrary code — only trusted input may be passed.
    """
    if not code:
        return None

    class CodeParser(ast.NodeVisitor):
        def __init__(self):
            self.convert_exists = False

        # Only top-level defs count: nested functions are not inspected.
        def visit_FunctionDef(self, node):
            if node.name == 'pre' and len(node.args.args) == 2:
                self.convert_exists = True

        def parse(self, code):
            self.visit(ast.parse(code))

    parser = CodeParser()
    parser.parse(code)
    if not parser.convert_exists:
        raise ValueError("Function pre with 2 arguments not found in the code")
    # Isolated namespace (seeded with module globals) instead of exec'ing into
    # globals(), which would let user code clobber module-level names.
    namespace = dict(globals())
    exec(code, namespace)
    pre_func = namespace.get('pre')
    if not callable(pre_func):
        raise ValueError("Function pre with 2 arguments not found in the code")
    return pre_func


# In-memory caches refreshed every 10 seconds by the background thread started
# in start_refresh_data_periodically(); endpoints such as /load_script and
# /load_embed serve these instead of hitting storage when cache=true is passed.
cache_type_map = {}  # from node_data_storage.distinct_types_by_script_name(...)
cache_eval_name_map = {}  # from eval_data_storage.distinct_eval_names()
cache_plan_name_map = {}  # from plan_storage.distinct_plan_names()
cache_group_map = {}  # from node_data_storage.distinct_groups()
cache_source_to_node_map = {}  # from node_data_storage.distinct_source_by_script_name()
cache_node_to_source_map = {}  # from node_data_storage.distinct_source_by_script_name()
cache_embed_map = {}  # from embed_storage.distinct_groups()


def start_refresh_data_periodically():
    """Start a daemon thread that refreshes the module-level caches every 10s.

    Each tick re-reads the distinct groups/types/eval names/plan names/source
    mappings from storage; a failing tick is logged and retried next round.
    """
    # Endless refresh loop executed on a background thread.
    def refresh_data_periodically():
        global cache_type_map, cache_eval_name_map, cache_plan_name_map, cache_group_map, cache_source_to_node_map, cache_node_to_source_map, cache_embed_map
        while True:
            try:
                cache_embed_map = embed_storage.distinct_groups()
                cache_group_map = node_data_storage.distinct_groups()
                cache_type_map = node_data_storage.distinct_types_by_script_name(list(script.get_script_schemas().keys()))
                cache_eval_name_map = eval_data_storage.distinct_eval_names()
                cache_plan_name_map = plan_storage.distinct_plan_names()
                cache_source_to_node_map, cache_node_to_source_map = node_data_storage.distinct_source_by_script_name()
            except Exception as e:
                # Log the message text; exception objects may not serialize.
                util.log('refresh_cache_error', {
                    "error": str(e)
                })
            # Pause 10 seconds between refresh rounds.
            time.sleep(10)

    # Daemon thread: the endless loop must never block interpreter shutdown
    # (the previous non-daemon thread kept the process alive forever).
    t = threading.Thread(target=refresh_data_periodically, name='cache-refresh', daemon=True)
    t.start()


@app.route('/load_script', methods=['GET'])
def load_script():
    """Return every script schema enriched with its running tasks, groups,
    types, source mappings, eval names and plan names.

    With ?cache=true the periodically refreshed module caches are served;
    otherwise the distinct-* queries are issued concurrently against storage.
    """
    if request.args.get('cache') == 'true':
        group_map = cache_group_map
        type_map = cache_type_map
        eval_name_map = cache_eval_name_map
        plan_name_map = cache_plan_name_map
    else:
        with ThreadPoolExecutor(max_workers=4) as executor:
            group_future = executor.submit(node_data_storage.distinct_groups)
            type_future = executor.submit(node_data_storage.distinct_types_by_script_name, list(script.get_script_schemas().keys()))
            eval_future = executor.submit(eval_data_storage.distinct_eval_names)
            plan_future = executor.submit(plan_storage.distinct_plan_names)
            group_map = group_future.result()
            type_map = type_future.result()
            eval_name_map = eval_future.result()
            plan_name_map = plan_future.result()
    # Source mappings are always served from the refreshed cache.
    source_to_node_map = cache_source_to_node_map
    node_to_source_map = cache_node_to_source_map

    # Merge the per-script runtime state into a copy of the schema dict.
    def enrich_schema(script_name, schema):
        merged = schema.copy()
        merged.update({
            'tasks': [t.to_dict() for t in one_runtime.tasks.values() if t.script_name == script_name],
            'eval_tasks': [t.to_dict() for t in one_runtime.eval_tasks.values() if t.script_name == script_name],
            'groups': group_map.get(script_name) or {},
            'types': type_map.get(script_name) or [],
            'source_to_nodes': source_to_node_map.get(script_name) or {},
            'node_to_sources': node_to_source_map.get(script_name) or {},
            'eval_names': eval_name_map.get(script_name) or {},
            'plan_names': plan_name_map.get(script_name) or {},
        })
        return merged

    payload = {name: enrich_schema(name, schema) for name, schema in script.get_script_schemas().items()}
    return json.dumps(payload, ensure_ascii=False)


@app.route('/load_embed', methods=['GET'])
def load_embed():
    """List the distinct embed groups, served from cache when ?cache=true."""
    use_cache = request.args.get('cache') == 'true'
    embed_map = cache_embed_map if use_cache else embed_storage.distinct_groups()
    return json.dumps({
        'success': True,
        'data': embed_map
    }, ensure_ascii=False)


@app.route('/search_script_node_datas', methods=['GET'])
def select_nodes():
    """Page through script node data rows matching the query-string filters.

    Requires data_group, node and script_name; supports optional content_type,
    search, filter (all / content_not_null / ...), time_range, offset, size.
    """
    args = request.args
    data_group = args.get('data_group')
    content_type = args.get('content_type')
    node = args.get('node')
    script_name = args.get('script_name')
    search = args.get('search')
    filter_mode = args.get('filter')
    time_range = args.get('time_range')
    if not data_group or not node or not script_name:
        raise ValueError('data_group 和 node script_name不可为空')

    # "A to B" ranges split into both bounds; a bare value is the lower bound.
    begin_time, end_time = None, None
    if time_range:
        if ' to ' in time_range:
            pieces = time_range.split(' to ')
            begin_time, end_time = pieces[0], pieces[1]
        else:
            begin_time = time_range

    filters = {
        'data_group': data_group,
        'node': node,
        'content_type': content_type,
        'search': search,
        'script_name': script_name,
        'content_not_null': None if filter_mode == 'all' else filter_mode == 'content_not_null',
        'begin_time': begin_time,
        'end_time': end_time
    }

    # Count first so an empty result skips the row query.
    total = node_data_storage.count_datas(filters=filters)
    if total == 0:
        return json.dumps({
            'total': 0,
            'rows': []
        })

    offset = int(args.get('offset', 0))
    size = int(args.get('size', 10))

    rows = node_data_storage.select_datas_4_export(filters=filters, offset=offset, size=size)
    return json.dumps({
        "total": total,
        "rows": rows
    }, ensure_ascii=False, default=str)


@app.route('/search_embed_datas', methods=['GET'])
def search_embed_datas():
    """Page through embed records, flagging whether each is synced to chroma."""
    data_group = request.args.get('data_group')
    embed_scope = request.args.get('embed_scope')
    search = request.args.get('search')
    embed = request.args.get('embed')
    filters = {
        'data_group': data_group,
        'embed_scope': embed_scope,
        "search": search
    }

    # 'all' (or absent) means no embed filter; otherwise filter by true/false.
    if embed and embed != 'all':
        filters['embed'] = (embed == 'true')

    # Count first so an empty result skips the row query.
    total = embed_storage.count_embeds(filters=filters)
    if total == 0:
        return json.dumps({
            'total': 0,
            'rows': []
        })

    offset = int(request.args.get('offset', 0))
    size = int(request.args.get('size', 10))

    rows = embed_storage.select_embeds(filters=filters, offset=offset, size=size)
    chroma_map = embed_storage.select_chroma(embed_scope=embed_scope, ids=[row.id for row in rows])

    def in_sync(row):
        # A row holding an embedding is in sync when chroma has a record with
        # the same modification stamp; a row without one is in sync when
        # chroma has no record at all.
        chroma_rec = chroma_map.get(str(row.id))
        if row.embed_array is not None:
            return chroma_rec is not None and chroma_rec.get('gmt_modified') == row.gmt_modified
        return chroma_rec is None

    formatted = [dict(json.loads(row.to_json()), sync_chroma=in_sync(row)) for row in rows]
    return json.dumps({
        "total": total,
        "rows": formatted
    }, ensure_ascii=False)


@app.route('/select_script_eval_datas', methods=['GET'])
def select_script_eval_datas():
    """Page through eval data rows for one (source node, eval node, eval name).

    Query params script_name, source_node, source_group, eval_node and
    eval_name are all required; search, offset and size are optional.
    """
    script_name = request.args.get('script_name')
    source_node = request.args.get('source_node')
    source_group = request.args.get('source_group')
    eval_node = request.args.get('eval_node')
    eval_name = request.args.get('eval_name')
    search = request.args.get('search')
    if not script_name or not source_node or not source_group or not eval_node or not eval_name:
        # Fixed: the previous message named unrelated params (data_group/node)
        # instead of the ones this check actually validates.
        raise ValueError('script_name source_node source_group eval_node eval_name 不可为空')

    filters = {
        'script_name': script_name,
        'source_node': source_node,
        'source_group': source_group,
        'eval_node': eval_node,
        'eval_name': eval_name,
        'search': search
    }

    # Count first so an empty result skips the row query.
    total = eval_data_storage.count_eval_datas(filters=filters)
    if total == 0:
        return json.dumps({
            'total': 0,
            'rows': []
        })

    offset = int(request.args.get('offset', 0))
    size = int(request.args.get('size', 10))

    result = eval_data_storage.select_eval_datas(filters=filters, offset=offset, size=size)
    formatted_result = [json.loads(item.to_json()) for item in result]
    return json.dumps({
        "total": total,
        "rows": formatted_result
    }, ensure_ascii=False)


@app.route('/select_script_node_plans', methods=['GET'])
def select_script_node_plans():
    """List every stored plan for one script node."""
    script_name = request.args.get('script_name')
    node = request.args.get('node')
    if not script_name or not node:
        raise ValueError('node script_name不可为空')

    plans = plan_storage.select_plans(filters={
        'script_name': script_name,
        'node': node
    })
    rows = [json.loads(plan.to_json()) for plan in plans]
    return json.dumps({
        "rows": rows
    }, ensure_ascii=False)


@app.route('/saveOrUpdatePlan', methods=['POST'])
def saveOrUpdatePlan():
    """Create a plan, or update an existing one when ``id`` is supplied.

    ``config`` must be a JSON object. New plans additionally require
    ``script_name``, ``node`` and ``name``; updates change the config and,
    when provided, the name.
    """
    config = request.form.get('config')
    try:
        config = json.loads(config)
        if not isinstance(config, dict):
            return json.dumps({
                "success": False,
                "message": '配置必须为字典'
            }, ensure_ascii=False)
    except (TypeError, ValueError):
        # Narrowed from a bare ``except:``. TypeError covers a missing field
        # (json.loads(None)); ValueError/JSONDecodeError covers malformed JSON.
        return json.dumps({
            "success": False,
            "message": '方案配置必须是json'
        }, ensure_ascii=False)
    id = request.form.get('id')
    try:
        name = request.form.get('name')
        if id:
            # Update path: only overwrite the name when one was provided.
            update_cfg = {
                'config': config
            }
            if name:
                update_cfg['name'] = name
            plan_storage.update_plan_by_id(id, update_cfg)
            return json.dumps({
                "success": True
            }, ensure_ascii=False)

        # Insert path.
        script_name = request.form.get('script_name')
        node = request.form.get('node')
        if not script_name or not node or not name:
            return json.dumps({
                "success": False,
                "message": '脚本 名称不可为空'
            }, ensure_ascii=False)
        plan_storage.add_plan(ScriptNodePlan(script_name=script_name, node=node, name=name, online=False, config=config))
        return json.dumps({
            "success": True
        }, ensure_ascii=False)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # still propagate; storage failure (e.g. duplicate name) lands here.
        return json.dumps({
            "success": False,
            "message": '新增方案失败，方案名称不可重复'
        }, ensure_ascii=False)


@app.route('/saveOrUpdateEmbed', methods=['POST'])
def saveOrUpdateEmbed():
    """Create an embed record, or update an existing one when ``id`` is given.

    ``embed_text`` is required. ``tags``/``keywords`` must be JSON lists and
    ``content`` a JSON object when present. New records also require
    ``embed_scope`` (a chroma-compatible collection name) and ``data_group``.
    """
    tags = request.form.get('tags')
    keywords = request.form.get('keywords')
    embed_text = request.form.get('embed_text')
    content = request.form.get('content')
    if not embed_text:
        return json.dumps({
            "success": False,
            "message": 'embed_text 不可为空'
        }, ensure_ascii=False)

    try:
        if keywords:
            keywords = json.loads(keywords)
            if not isinstance(keywords, list):
                return json.dumps({
                    "success": False,
                    "message": 'keywords必须为列表'
                }, ensure_ascii=False)
        else:
            keywords = None

        if tags:
            tags = json.loads(tags)
            if not isinstance(tags, list):
                return json.dumps({
                    "success": False,
                    "message": 'tags必须为列表'
                }, ensure_ascii=False)
        else:
            tags = None

        if content:
            content = json.loads(content)
            if not isinstance(content, dict):
                return json.dumps({
                    "success": False,
                    "message": 'content必须为字典'
                }, ensure_ascii=False)
        else:
            content = None
    except ValueError:
        # Narrowed from a bare ``except:``; json.loads raises JSONDecodeError
        # (a ValueError subclass) on malformed input.
        return json.dumps({
            "success": False,
            "message": '配置不规范'
        }, ensure_ascii=False)
    id = request.form.get('id')
    try:
        if id:
            # Update path.
            embed_storage.update_embed_by_id(id, {
                'tags': tags,
                'keywords': keywords,
                'embed_text': embed_text,
                'content': content
            })
            return json.dumps({
                "success": True
            }, ensure_ascii=False)

        # Insert path.
        embed_scope = request.form.get('embed_scope')
        data_group = request.form.get('data_group')
        if not embed_scope or not data_group:
            return json.dumps({
                "success": False,
                'message': '向量域和数据集不可为空'
            }, ensure_ascii=False)
        # Syncing to chroma imposes a collection-name format; validate up front.
        pattern = "^[a-zA-Z0-9]([a-zA-Z0-9_-]{1,61}[a-zA-Z0-9])?$"
        if not re.match(pattern, embed_scope):
            return json.dumps({
                'success': False,
                'message': '''向量域不规范，- 长度在 3-63 字符之间
    - 开头和结束都是字母数字字符
    - 除此之外只包含字母数字字符、下划线或破折号(-)'''
            }, ensure_ascii=False)
        embed_storage.add_embeds([EmbedData(embed_scope=embed_scope, data_group=data_group, tags=tags, keywords=keywords, content=content, embed_text=embed_text)])
        return json.dumps({
            "success": True
        }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            "success": False,
            "message": '新增失败,' + str(e)
        }, ensure_ascii=False)


@app.route('/ops_script_plan', methods=['POST'])
def ops_script_plan():
    """Delete a plan, or flip it online/offline, by id."""
    plan_id = request.form.get('id')
    action = request.form.get('action')
    if not plan_id or not action:
        return json.dumps({
            "success": False,
            "message": '方案id和操作不可为空'
        }, ensure_ascii=False)

    if action == 'delete':
        plan_storage.delete_plan(plan_id)
    elif action in ('online', 'offline'):
        # Same storage call for both; the flag is the desired online state.
        plan_storage.switch_plan(plan_id, action == 'online')
    else:
        return json.dumps({
            "success": False,
            "message": '未知操作类型'
        }, ensure_ascii=False)
    return json.dumps({
        "success": True,
        "message": 'success'
    }, ensure_ascii=False)


@app.route('/import_script_node_datas', methods=['POST'])
def import_script_node_datas():
    """Import node data rows into a data group from an uploaded excel file or
    an ODPS SQL query.

    Form fields: source ('excel' or 'odps'), script_name, data_group, remark
    (all required), pre_check (bool), plus excel_file or import_sql depending
    on the source. Returns a JSON envelope with success/message/data.
    """
    source = request.form.get('source')
    if source not in ('excel', 'odps'):
        return json.dumps({
            'success': False,
            'message': '不支持的来源'
        }, ensure_ascii=False)

    if source == 'excel' and 'excel_file' not in request.files:
        return json.dumps({
            'success': False,
            'message': '文件不存在'
        }, ensure_ascii=False)
    import_sql = request.form.get('import_sql')

    if source == 'odps' and not import_sql:
        return json.dumps({
            'success': False,
            'message': '导入sql必填'
        }, ensure_ascii=False)

    script_name = request.form.get('script_name')
    data_group = request.form.get('data_group')

    if script_name not in script.get_script_schemas():
        return json.dumps({
            'success': False,
            'message': '脚本不存在'
        }, ensure_ascii=False)
    if not data_group:
        return json.dumps({
            'success': False,
            'message': '数据集不可为空'
        }, ensure_ascii=False)

    # Whether the storage layer should validate rows before writing.
    pre_check = request.form.get('pre_check', False, bool)
    remark = request.form.get('remark')
    if not remark:
        return json.dumps({
            'success': False,
            'message': '导入备注不可为空'
        }, ensure_ascii=False)

    try:
        if source == 'excel':
            upload = request.files['excel_file']
            # Persist the upload so pandas can read it by path. delete=False is
            # required to reopen the file by name; we unlink it ourselves below
            # (the original leaked one temp file per import).
            with tempfile.NamedTemporaryFile(suffix='.xlsx', delete=False) as temp_file:
                upload.save(temp_file.name)
                temp_path = temp_file.name
            try:
                df = pd.read_excel(temp_path, engine='openpyxl')
                data_list = df.to_dict(orient='records')
                # Empty cells come back as NaN/NaT; store None instead.
                for row in data_list:
                    for key, value in row.items():
                        if pd.isna(value):
                            row[key] = None
                data = node_data_storage.import_datas(data_group, data_list, pre_check, remark, script_name)
            finally:
                os.unlink(temp_path)
            return json.dumps({
                'success': True,
                'message': '导入成功',
                'data': data
            }, ensure_ascii=False)
        elif source == 'odps':
            data_list = odps_storage.read(sql=import_sql)
            data = node_data_storage.import_datas(data_group, data_list, pre_check, remark, script_name)
            return json.dumps({
                'success': True,
                'message': '导入成功',
                'data': data
            }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False)


@app.route('/import_embed_datas', methods=['POST'])
def import_embed_datas():
    """Import embed rows into an embed scope / data group from an uploaded
    excel file or an ODPS SQL query.

    Form fields: source ('excel' or 'odps'), embed_scope (chroma-compatible
    name), data_group (all required), pre_check (bool), plus excel_file or
    import_sql depending on the source.
    """
    source = request.form.get('source')
    if source not in ('excel', 'odps'):
        return json.dumps({
            'success': False,
            'message': '不支持的来源'
        }, ensure_ascii=False)

    if source == 'excel' and 'excel_file' not in request.files:
        return json.dumps({
            'success': False,
            'message': '文件不存在'
        }, ensure_ascii=False)
    import_sql = request.form.get('import_sql')

    if source == 'odps' and not import_sql:
        return json.dumps({
            'success': False,
            'message': '导入sql必填'
        }, ensure_ascii=False)

    embed_scope = request.form.get('embed_scope')
    data_group = request.form.get('data_group')

    if not embed_scope:
        return json.dumps({
            'success': False,
            'message': '域不存在'
        }, ensure_ascii=False)
    if not data_group:
        return json.dumps({
            'success': False,
            'message': '数据集不可为空'
        }, ensure_ascii=False)

    # Syncing to chroma imposes a collection-name format; validate up front.
    pattern = "^[a-zA-Z0-9]([a-zA-Z0-9_-]{1,61}[a-zA-Z0-9])?$"
    if not re.match(pattern, embed_scope):
        return json.dumps({
            'success': False,
            'message': '''向量域不规范，- 长度在 3-63 字符之间
- 开头和结束都是字母数字字符
- 除此之外只包含字母数字字符、下划线或破折号(-)'''
        }, ensure_ascii=False)

    # Whether the storage layer should validate rows before writing.
    pre_check = request.form.get('pre_check', False, bool)

    try:
        if source == 'excel':
            upload = request.files['excel_file']
            # Persist the upload so pandas can read it by path. delete=False is
            # required to reopen the file by name; we unlink it ourselves below
            # (the original leaked one temp file per import).
            with tempfile.NamedTemporaryFile(suffix='.xlsx', delete=False) as temp_file:
                upload.save(temp_file.name)
                temp_path = temp_file.name
            try:
                df = pd.read_excel(temp_path, engine='openpyxl')
                data_list = df.to_dict(orient='records')
                # Empty cells come back as NaN/NaT; store None instead.
                for row in data_list:
                    for key, value in row.items():
                        if pd.isna(value):
                            row[key] = None
                data = embed_storage.import_embeds(data_group, data_list, pre_check, embed_scope)
            finally:
                os.unlink(temp_path)
            return json.dumps({
                'success': True,
                'message': '导入成功',
                'data': data
            }, ensure_ascii=False)
        elif source == 'odps':
            data_list = odps_storage.read(sql=import_sql)
            data = embed_storage.import_embeds(data_group, data_list, pre_check, embed_scope)
            return json.dumps({
                'success': True,
                'message': '导入成功',
                'data': data
            }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False)


@app.route('/export_script_node_datas', methods=['GET'])
def export_script_node_datas():
    """Export script node data.

    point='excel' streams an .xlsx download; point='odps' writes the rows into
    ODPS partition `pt` and returns a verification SQL. Required query args:
    data_group, node, script_name, point ('excel'|'odps'); pt is additionally
    required when point == 'odps'. Optional: content_type, search, filter,
    time_range ('begin to end' or a bare begin time), offset, size, pre_check.
    Failures inside the export return a JSON error payload with HTTP 502.
    """
    data_group = request.args.get('data_group')
    content_type = request.args.get('content_type')
    node = request.args.get('node')
    script_name = request.args.get('script_name')
    search = request.args.get('search')
    filter = request.args.get('filter')
    time_range = request.args.get('time_range')
    point = request.args.get('point')
    pt = request.args.get('pt')
    if point not in ('odps', 'excel'):
        raise ValueError('导出目标不规范')
    if point in ['odps'] and not pt:
        raise ValueError('导出分区不可为空')

    if not data_group or not node or not script_name:
        raise ValueError('data_group 和 node script_name 不可为空')

    # time_range is either 'begin to end' or a bare begin time.
    begin_time = None
    end_time = None
    if time_range:
        if ' to ' in time_range:
            begin_time = time_range.split(' to ')[0]
            end_time = time_range.split(' to ')[1]
        else:
            begin_time = time_range

    filters = {
        'data_group': data_group,
        'content_type': content_type,
        'node': node,
        'search': search,
        'script_name': script_name,
        'content_not_null': None if filter == 'all' else filter == 'content_not_null',
        'begin_time': begin_time,
        'end_time': end_time
    }

    offset = request.args.get('offset')
    size = request.args.get('size')

    # The front-end may send 'NaN' or blank strings for the paging params.
    if offset is not None and offset != 'NaN' and len(offset.strip()) > 0:
        offset = int(offset)
    else:
        offset = None
    # Bug fix: this previously tested `offset != 'NaN'` instead of `size`,
    # so a literal 'NaN' size raised ValueError from int() instead of being ignored.
    if size is not None and size != 'NaN' and len(size.strip()) > 0:
        size = int(size)
    else:
        size = None

    try:
        rows = node_data_storage.select_datas_4_export(filters=filters, offset=offset, size=size)
        # Stringify nested structures / bools / numbers so Excel and ODPS get flat text cells.
        for item in rows:
            for key, value in item.items():
                if isinstance(value, (dict, list)):
                    item[key] = json.dumps(value, ensure_ascii=False, default=str)
                elif isinstance(value, bool):
                    item[key] = str(value).lower()
                elif isinstance(value, (float, int)):
                    item[key] = "" if pd.isna(value) else str(value)

        if point == 'excel':
            num = len(rows)
            if num == 0:
                return jsonify({
                    'success': False,
                    'message': '数据量为0'
                }), 501

            curr_script = script.get_script_schemas().get(script_name)
            if not curr_script:
                return jsonify({
                    'success': False,
                    'message': '脚本不存在'
                }), 501
            functions = curr_script['functions']
            explain_sql = None if node == script_name else functions.get(node).get('explain_sql')
            headers = None
            if explain_sql:
                # Write the raw rows first, then re-read them through the node's explain SQL.
                odps_storage.write_node_data(pt=pt, rows=rows)
                select_result = odps_storage.select_node_data_with_explain(pt=pt, explain_sql=explain_sql)
                headers = select_result.get('headers')
                rows = select_result.get('rows')
            else:
                # Plain export with the full fixed column set.
                headers = ["id", "script_name", "node", "data_group", "content_type", "content_key", "content", "ext_info", "gmt_create", "req_id", "source_id", "source_node", "source_nodes", "source_data_group", "source_content_type", "source_content_key", "source_content", "source_ext_info"]

            df = pd.DataFrame(rows, columns=headers)
            for header in headers:
                # Force string dtype so long numbers don't render as scientific notation.
                df[header] = df[header].fillna('').astype(str)

            # Write the DataFrame to an in-memory Excel workbook.
            output = io.BytesIO()
            with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
                df.to_excel(writer, index=False)
            excel_bytes = output.getvalue()
            output.close()

            # The previous base64 encode->decode round-trip was dead work; serve the bytes directly.
            filename = 'exported_data.xlsx'
            content_disposition = f'attachment; filename={quote(filename)}'
            return Response(excel_bytes, content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', headers={
                'Content-Disposition': content_disposition
            })

        elif point == 'odps':
            num = len(rows)
            if num == 0:
                return json.dumps({
                    'success': False,
                    'message': '数据量为0'
                }, ensure_ascii=False)
            pre_check = request.args.get('pre_check')
            if pre_check == 'true':
                # Dry run: report the row count without writing anything.
                return json.dumps({
                    'success': True,
                    'data': {
                        'num': num
                    }
                })
            write_result = odps_storage.write_node_data(pt=pt, rows=rows)
            curr_script = script.get_script_schemas().get(script_name)
            if not curr_script:
                raise ValueError('脚本不存在')
            functions = curr_script['functions']
            explain_sql = None if node == script_name else functions.get(node).get('explain_sql')
            if explain_sql:
                explain_sql = explain_sql.replace('${pt}', f'{pt}')
            else:
                explain_sql = f'select * from {write_result.get("project")}.{write_result.get("table")} where pt=\'{pt}\';'
            return json.dumps({
                'success': True,
                'data': {
                    'num': num,
                    'sql': explain_sql
                }
            })

        else:
            return json.dumps({
                'success': False,
                'message': '不支持的导出模式'
            }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False), 502


@app.route('/export_embed_datas', methods=['GET'])
def export_embed_datas():
    """Export embedding rows for a scope/group as an .xlsx download.

    Required query args: data_group, embed_scope, point. Only point='excel' is
    implemented; point='odps' validates `pt` but then falls through to the
    '不支持的导出模式' response. Optional: search, embed ('true'/'false'/'all').
    """
    data_group = request.args.get('data_group')
    embed_scope = request.args.get('embed_scope')
    search = request.args.get('search')
    embed = request.args.get('embed')
    point = request.args.get('point')
    pt = request.args.get('pt')
    if point not in ('odps', 'excel'):
        raise ValueError('导出目标不规范')
    if point in ['odps'] and not pt:
        raise ValueError('导出分区不可为空')

    if not data_group or not embed_scope:
        raise ValueError('data_group 和 embed_scope 不可为空')

    filters = {
        'data_group': data_group,
        'embed_scope': embed_scope,
        'search': search,
    }
    if embed and embed != 'all':
        # 'true'/'false' filters on whether the row already has an embedding.
        filters['embed'] = embed == 'true'

    try:
        result = embed_storage.select_embeds(filters=filters)

        rows = [item.to_dict() for item in result]
        # Stringify nested structures / bools / numbers so Excel gets flat text cells.
        for item in rows:
            for key, value in item.items():
                if isinstance(value, (dict, list)):
                    item[key] = json.dumps(value, ensure_ascii=False, default=str)
                elif isinstance(value, bool):
                    item[key] = str(value).lower()
                elif isinstance(value, (float, int)):
                    item[key] = "" if pd.isna(value) else str(value)

        if point == 'excel':
            num = len(rows)
            if num == 0:
                return jsonify({
                    'success': False,
                    'message': '数据量为0'
                }), 501

            # Plain export with the fixed column set.
            headers = ['id', 'embed_scope', 'data_group', 'tags', 'keywords', 'embed_text', 'embed_array', 'content', 'gmt_modified']
            df = pd.DataFrame(rows, columns=headers)
            for header in headers:
                # Force string dtype so long numbers don't render as scientific notation.
                df[header] = df[header].fillna('').astype(str)

            # Write the DataFrame to an in-memory Excel workbook.
            output = io.BytesIO()
            with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
                df.to_excel(writer, index=False)
            excel_bytes = output.getvalue()
            output.close()

            # The previous base64 encode->decode round-trip was dead work; serve the bytes directly.
            filename = 'exported_data.xlsx'
            content_disposition = f'attachment; filename={quote(filename)}'
            return Response(excel_bytes, content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', headers={
                'Content-Disposition': content_disposition
            })
        else:
            return json.dumps({
                'success': False,
                'message': '不支持的导出模式'
            }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False), 502


@app.route('/export_script_eval_datas', methods=['GET'])
def export_script_eval_datas():
    """Export evaluation result rows as an .xlsx download.

    Required query args: script_name, source_node, source_group, eval_node,
    eval_name, point. Only point='excel' is implemented; point='odps' raises
    '待实现' (not implemented yet). Optional: search.
    """
    script_name = request.args.get('script_name')
    source_node = request.args.get('source_node')
    eval_node = request.args.get('eval_node')
    source_group = request.args.get('source_group')
    eval_name = request.args.get('eval_name')
    search = request.args.get('search')
    point = request.args.get('point')
    pt = request.args.get('pt')
    if point not in ('odps', 'excel'):
        raise ValueError('导出目标不规范')
    if point in ['odps'] and not pt:
        raise ValueError('导出分区不可为空')

    if not source_node or not source_group or not script_name or not eval_node or not eval_name:
        raise ValueError('source_group 和 source_node 和 eval_node script_name 不可为空')

    filters = {
        'script_name': script_name,
        'source_node': source_node,
        'source_group': source_group,
        'eval_node': eval_node,
        'eval_name': eval_name,
        'search': search,
    }
    try:
        result = eval_data_storage.select_eval_datas(filters=filters)

        rows = [item.to_dict() for item in result]
        # Stringify nested structures / bools / numbers so Excel gets flat text cells.
        for item in rows:
            for key, value in item.items():
                if isinstance(value, (dict, list)):
                    item[key] = json.dumps(value, ensure_ascii=False, default=str)
                elif isinstance(value, bool):
                    item[key] = str(value).lower()
                elif isinstance(value, (float, int)):
                    item[key] = "" if pd.isna(value) else str(value)

        if point == 'excel':
            num = len(rows)
            if num == 0:
                return jsonify({
                    'success': False,
                    'message': '数据量为0'
                }), 501

            curr_script = script.get_script_schemas().get(script_name)
            if not curr_script:
                return jsonify({
                    'success': False,
                    'message': '脚本不存在'
                }), 501
            # Fixed column set for eval-data exports.
            headers = ['id', 'script_name', 'source_node', 'source_group', 'eval_node', 'eval_name', 'source_id', 'source_content_type', 'source_content', 'req_id', 'eval_datas', 'results', 'ext_info', 'gmt_create']

            df = pd.DataFrame(rows, columns=headers)
            for header in headers:
                # Force string dtype so long numbers don't render as scientific notation.
                df[header] = df[header].fillna('').astype(str)

            # Write the DataFrame to an in-memory Excel workbook.
            output = io.BytesIO()
            with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
                df.to_excel(writer, index=False)
            excel_bytes = output.getvalue()
            output.close()

            # The previous base64 encode->decode round-trip was dead work; serve the bytes directly.
            filename = 'exported_data.xlsx'
            content_disposition = f'attachment; filename={quote(filename)}'
            return Response(excel_bytes, content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', headers={
                'Content-Disposition': content_disposition
            })

        elif point == 'odps':
            raise ValueError('待实现')
        else:
            return json.dumps({
                'success': False,
                'message': '不支持的导出模式'
            }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False), 502


@app.route('/export_script_eval_infos', methods=['GET'])
def export_script_eval_infos():
    """Export rows that still need evaluation as an .xlsx download.

    Required query args: script_name, source_node, source_group, eval_node,
    point. Only point='excel' is implemented; point='odps' raises '待实现'.
    Optional: data_groups.
    """
    script_name = request.args.get('script_name')
    source_node = request.args.get('source_node')
    eval_node = request.args.get('eval_node')
    source_group = request.args.get('source_group')
    data_groups = request.args.get('data_groups')
    point = request.args.get('point')
    pt = request.args.get('pt')
    if point not in ('odps', 'excel'):
        raise ValueError('导出目标不规范')
    if point in ['odps'] and not pt:
        raise ValueError('导出分区不可为空')

    if not source_node or not source_group or not script_name or not eval_node:
        raise ValueError('source_group 和 source_node 和 eval_node script_name 不可为空')

    try:
        # NOTE(review): `pt` is forwarded as `eval_name` here rather than as a
        # partition — looks intentional for this endpoint, but confirm.
        result = eval_data_storage.select_need_eval_datas(filters={
            'script_name': script_name,
            'source_node': source_node,
            'source_group': source_group,
            'eval_node': eval_node,
            'data_groups': data_groups,
            'eval_name': pt
        })

        rows = [item.to_dict() for item in result]
        # Stringify nested structures / bools / numbers so Excel gets flat text cells.
        for item in rows:
            for key, value in item.items():
                if isinstance(value, (dict, list)):
                    item[key] = json.dumps(value, ensure_ascii=False, default=str)
                elif isinstance(value, bool):
                    item[key] = str(value).lower()
                elif isinstance(value, (float, int)):
                    item[key] = "" if pd.isna(value) else str(value)

        if point == 'excel':
            num = len(rows)
            if num == 0:
                return jsonify({
                    'success': False,
                    'message': '数据量为0'
                }), 501
            # Fixed column set for need-eval exports.
            headers = ['source_id', 'script_name', 'source_node', 'source_group', 'source_content_type', 'source_content', 'eval_node', 'eval_datas']
            df = pd.DataFrame(rows, columns=headers)
            for header in headers:
                # Force string dtype so long numbers don't render as scientific notation.
                df[header] = df[header].fillna('').astype(str)

            # Write the DataFrame to an in-memory Excel workbook.
            output = io.BytesIO()
            with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
                df.to_excel(writer, index=False)
            excel_bytes = output.getvalue()
            output.close()

            # The previous base64 encode->decode round-trip was dead work; serve the bytes directly.
            filename = 'exported_data.xlsx'
            content_disposition = f'attachment; filename={quote(filename)}'
            return Response(excel_bytes, content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', headers={
                'Content-Disposition': content_disposition
            })

        elif point == 'odps':
            raise ValueError('待实现')
        else:
            return json.dumps({
                'success': False,
                'message': '不支持的导出模式'
            }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False), 502


@app.route('/create_script_node_task', methods=['POST'])
def create_script_node_task():
    """Create an async node-run task from the posted form; returns the row count."""
    form = request.form
    try:
        task_name = form.get('task_name')
        script_name = form.get('script_name')
        node = form.get('node')
        data_group = form.get('data_group')
        source_type = form.get('source_type')
        source_group = form.get('source_group')
        search = form.get('search')
        source_node = form.get('source_node')
        time_range = form.get('time_range')
        spread_nodes = form.get('spread_nodes')
        test_name = form.get('test_name')
        debug_param = form.get('debug_param')
        plan_id = form.get('plan_id')
        pre_check = form.get('pre_check')

        def _to_int(raw):
            # Non-empty strings become ints; empty/missing values pass through unchanged.
            return int(raw) if raw else raw

        offset = _to_int(form.get('offset'))
        size = _to_int(form.get('size'))

        # 'begin to end' splits into a window; a bare value is the start time only.
        begin_time = None
        end_time = None
        if time_range:
            pieces = time_range.split(' to ')
            begin_time = pieces[0]
            if len(pieces) > 1:
                end_time = pieces[1]

        # Resolve the plan config: 'online' uses the live plan (missing is fine),
        # any other non-empty id must exist and match this script/node.
        plan_config = {}
        if plan_id == 'online':
            online_plan = plan_storage.select_online_plan(script_name, node)
            if online_plan:
                plan_config = online_plan.config
        elif plan_id:
            chosen = plan_storage.select_plan_by_id(plan_id)
            if not chosen or chosen.script_name != script_name or chosen.node != node:
                raise ValueError('配置方案不存在或不匹配')
            plan_config = chosen.config

        source = {
            'source_group': source_group,
            'source_node': source_node,
            'source_type': source_type,
            'spread_nodes': spread_nodes,
            'offset': offset,
            'size': size,
            'search': search,
            'begin_time': begin_time,
            'end_time': end_time,
            'test_name': test_name,
            'debug_param': debug_param,
            'plan_config': plan_config
        }
        task = task_runner.create_node_task(name=task_name, script_name=script_name, node=node, data_group=data_group, wait=False, pre_check=pre_check == 'true', source=source)
        return json.dumps({
            "success": True,
            'num': task.num
        }, ensure_ascii=False, default=str)
    except Exception as e:
        return json.dumps({
            "success": False,
            'message': str(e)
        }, ensure_ascii=False, default=str)


@app.route('/re_create_bupao_task', methods=['POST'])
def re_create_bupao_task():
    """Re-run ("bupao") a completed node or eval task by cloning its source config."""
    def _resp(payload):
        # All responses in this handler share the same serialization settings.
        return json.dumps(payload, ensure_ascii=False, default=str)

    try:
        task_type = request.form.get('task_type')
        task_name = request.form.get('task_name')
        precheck_flag = request.form.get('pre_check') == 'true'

        if task_type == 'node_task':
            src: NodeTask = one_runtime.tasks.get(task_name)
            # Only finished tasks may be cloned for a re-run.
            if not src or src.status != 'complete':
                return _resp({"success": False, 'message': '源任务不存在或还未完成'})
            clone = task_runner.create_node_task(name=src.name + '_bupao', script_name=src.script_name, node=src.node, data_group=src.data_group, source=src.source, wait=False, pre_check=precheck_flag)
            return _resp({"success": True, 'num': clone.num})

        if task_type == 'eval_task':
            src: EvalTask = one_runtime.eval_tasks.get(task_name)
            if not src or src.status != 'complete':
                return _resp({"success": False, 'message': '补跑任务不存在或还未完成'})
            clone = task_runner.create_eval_task(name=src.name + '_bupao', script_name=src.script_name, eval_node=src.eval_node, eval_name=src.eval_name, source=src.source, wait=False, pre_check=precheck_flag)
            return _resp({"success": True, 'num': clone.num})

        return _resp({"success": False, 'message': '不支持的任务类型'})
    except Exception as e:
        return _resp({"success": False, 'message': str(e)})


@app.route('/update_task_worker_num', methods=['POST'])
def update_task_worker_num():
    """Change the worker (queue) count of a running node task; valid range 1-20."""
    try:
        task_type = request.form.get('task_type')
        task_name = request.form.get('task_name')
        worker_num = request.form.get('worker_num')
        if not task_type or not task_name or not worker_num:
            return json.dumps({
                "success": False,
                'message': '参数不全'
            }, ensure_ascii=False, default=str)
        worker_num = int(worker_num)
        if worker_num < 1 or worker_num > 20:
            return json.dumps({
                "success": False,
                'message': '队列数可选范围是1-20'
            }, ensure_ascii=False, default=str)
        if task_type == 'node_task':
            task: NodeTask = one_runtime.tasks.get(task_name)
            # Robustness fix: a missing task previously crashed with an
            # AttributeError on None; report a clear message instead
            # (same wording as change_script_node_task_status).
            if not task:
                return json.dumps({
                    "success": False,
                    'message': '任务不存在'
                }, ensure_ascii=False, default=str)
            # Persist the new count in the task source, then resize the worker pool.
            task.source['worker_num'] = worker_num
            task.update_workers(worker_num)
        else:
            return json.dumps({
                "success": False,
                'message': '不支持的任务类型'
            }, ensure_ascii=False, default=str)

        return json.dumps({
            "success": True
        }, ensure_ascii=False, default=str)
    except Exception as e:
        return json.dumps({
            "success": False,
            'message': str(e)
        }, ensure_ascii=False, default=str)


@app.route('/script_node_test', methods=['POST'])
def script_node_test():
    """Ad-hoc single-sample test of one script node.

    Loads the sample by source_id, resolves an optional plan config ('online'
    or an explicit plan_id), then calls the node function from the script's
    module directly and returns its result as JSON.
    """
    # test_id doubles as the trace id; starts empty so the error payload can
    # still include it if we fail before one is generated.
    test_id = ''
    try:
        script_name = request.form.get('script_name')
        node = request.form.get('node')
        source_id = request.form.get('source_id')
        plan_id = request.form.get('plan_id')
        test_name = request.form.get('test_name')
        debug_param = request.form.get('debug_param')
        if not script_name or not node or not source_id:
            raise ValueError('配置不全')

        test_source = ScriptNodeData.from_id(source_id)
        if not test_source:
            raise ValueError('样本不存在')
        plan_config = {}

        # Plan resolution: 'online' uses the currently-online plan (missing is
        # tolerated); any other non-empty id must exist and match this script/node.
        if plan_id == 'online':
            plan = plan_storage.select_online_plan(script_name, node)
            if plan:
                plan_config = plan.config
        elif plan_id:
            plan = plan_storage.select_plan_by_id(plan_id)
            if not plan or plan.script_name != script_name or plan.node != node:
                raise ValueError('配置方案不存在或不匹配')
            plan_config = plan.config

        test_id = str(time.time())
        # Thread-local marker so logs emitted during the call carry this test_id.
        util.log_local.run_data = {
            "test_id": test_id
        }
        test_ctx = script.NodeTaskCtx(task_name='task_test', task_source={}, script_name=script_name, node=node, data_group='task_test', plan_config=plan_config, req_id=test_id, test_name=test_name, debug_param=debug_param)
        module = script.get_script_modules().get(script_name)
        if not module:
            # The script's module may have been deleted or renamed.
            raise ValueError('module不存在，可能被删除了')
        # Get the function from the module
        function = getattr(module, node)
        # Call the function with the provided data and return the result
        test_result = function(test_source, test_ctx)
        # Normalize the result to plain dicts so json.dumps can serialize it.
        if isinstance(test_result, script.NodeResult):
            test_result = test_result.to_dict()
        elif isinstance(test_result, list):
            test_result = [item if isinstance(item, dict) else item.to_dict() for item in test_result]
        return json.dumps({
            "success": True,
            "data": test_result,
            'test_id': test_id
        })
    except Exception as e:
        return json.dumps({
            "success": False,
            'message': str(e),
            'test_id': test_id
        }, ensure_ascii=False, default=str)
    finally:
        # Clean up the thread-local so later requests on this thread don't inherit it.
        if hasattr(util.log_local, "run_data"):
            del util.log_local.run_data


@app.route('/script_eval_test', methods=['POST'])
def script_eval_test():
    """Run a node's eval function against a single stored data row (ad-hoc test).

    Looks up the data row, its upstream source row, and the node's configured
    eval function, then invokes it under a fresh test_id thread-local so logs
    can be correlated.
    """
    test_id = ''
    try:
        # Renamed from `id` to avoid shadowing the builtin; the form key is unchanged.
        data_id = request.form.get('id')
        if not data_id:
            raise ValueError('评测id不可为空')
        eval_data = node_data_storage.select_data_by_id(data_id)
        if not eval_data:
            raise ValueError('评测数据不可为空')

        source_data = node_data_storage.select_data_by_id(eval_data.source_id)
        if not source_data:
            raise ValueError('未发现上游数据')

        schema = script.get_script_schemas().get(eval_data.script_name)
        if not schema:
            # The script may have been deleted or renamed.
            raise ValueError('脚本不存在，可能被删除了')
        module = script.get_script_modules().get(eval_data.script_name)
        if not module:
            # Same: the module may have been deleted or renamed.
            raise ValueError('脚本不存在，可能被删除了')
        # Resolve the eval function configured for this node.
        functions = schema['functions']
        node_func = functions.get(eval_data.node)
        if not node_func:
            raise ValueError('节点不存在')
        eval_func = node_func.get('eval_func')
        if not eval_func:
            raise ValueError('节点未配置评测函数')

        # Bug fix: test_id is now generated BEFORE the ctx is built. Previously
        # EvalTaskCtx was created with req_id='' because test_id was assigned
        # only after the ctx, so the trace id never reached the eval call.
        test_id = str(time.time())
        # Thread-local marker so logs emitted during the call carry this test_id.
        util.log_local.run_data = {
            "test_id": test_id
        }

        eval_datas = {
            eval_data.data_group: [script.EvalData(id=eval_data.id, data_group=eval_data.data_group, content_type=eval_data.content_type, content=eval_data.content, ext_info=eval_data.ext_info)]
        }
        data = script.EvalInfo(eval_name='eval_test', script_name=eval_data.script_name, source_node=source_data.node, source_group=source_data.data_group, source_id=source_data.id, source_content_type=source_data.content_type, source_content=source_data.content, eval_node=eval_data.node,
                               eval_datas=eval_datas)
        ctx = script.EvalTaskCtx(task_name='eval_test', eval_name='eval_test', req_id=test_id)
        eval_result = getattr(module, eval_func)(data, ctx)
        # Normalize the result (None / dict / EvalResult-like) to a plain dict.
        data = {}
        if not eval_result:
            pass
        elif isinstance(eval_result, dict):
            data = script.EvalResult(results=eval_result.get('results'), ext_info=eval_result.get('ext_info')).to_dict()
        else:
            data = script.EvalResult(results=eval_result.results, ext_info=eval_result.ext_info).to_dict()
        return json.dumps({
            "success": True,
            "data": data,
            'test_id': test_id
        })
    except Exception as e:
        return json.dumps({
            "success": False,
            'message': str(e),
            'test_id': test_id
        }, ensure_ascii=False, default=str)
    finally:
        # Clean up the thread-local so later requests on this thread don't inherit it.
        if hasattr(util.log_local, "run_data"):
            del util.log_local.run_data


@app.route('/embed_recall_test', methods=['POST'])
def embed_recall_test():
    """Ad-hoc recall test: run a selector against an embedding scope/group."""
    test_id = ''
    form = request.form
    try:
        selector = form.get('selector')
        raw_size = form.get('size')
        selector_param = form.get('selector_param')
        embed_scope = form.get('embed_scope')
        data_group = form.get('data_group')
        query_text = form.get('query_text')
        selector_cls = one_runtime.selects.get(selector)
        if not (embed_scope and data_group and query_text and selector_cls):
            raise ValueError('向量域 和 数据集 和 查询文本 不可为空 或 选择器不存在')
        test_id = str(time.time())
        # Tag logs on this thread with the test id.
        util.log_local.run_data = {"test_id": test_id}
        size = int(raw_size) if raw_size else None
        param = json.loads(selector_param) if selector_param else {}
        groups = data_group.split(',')
        instance = selector_cls(embed_scope=embed_scope, data_group=groups)
        search_res = instance.select(query_texts=query_text, size=size, param=param)

        search_res.check_success()
        return json.dumps({
            "success": True,
            "data": search_res.to_dict().get('result'),
            'test_id': test_id
        })
    except Exception as e:
        return json.dumps({
            "success": False,
            'message': str(e),
            'test_id': test_id
        }, ensure_ascii=False, default=str)
    finally:
        # Drop the thread-local marker regardless of outcome.
        if hasattr(util.log_local, "run_data"):
            del util.log_local.run_data


@app.route('/create_script_eval_task', methods=['POST'])
def create_script_eval_task():
    """Create an async eval task from the posted form; returns the row count."""
    form = request.form
    try:
        # Source selection for the eval run.
        source = {
            'data_groups': form.get('data_groups'),
            'source_node': form.get('source_node'),
            'source_group': form.get('source_group')
        }
        task = task_runner.create_eval_task(
            name=form.get('task_name'),
            script_name=form.get('script_name'),
            eval_node=form.get('eval_node'),
            eval_name=form.get('eval_name'),
            wait=False,
            pre_check=form.get('pre_check') == 'true',
            source=source)
        return json.dumps({
            "success": True,
            'num': task.num
        }, ensure_ascii=False, default=str)
    except Exception as e:
        return json.dumps({
            "success": False,
            'message': str(e)
        }, ensure_ascii=False, default=str)


@app.route('/change_script_node_task_status', methods=['POST'])
def change_script_node_task_status():
    """Turn a task on/off or delete it from the in-memory registry.

    JSON body: task_type ('node_task'|'eval_task'), task_name,
    status ('on'|'off'|'delete'). Completed tasks can only be deleted.
    The node/eval branches previously duplicated this logic verbatim; it is
    now shared via one helper parameterized by the task registry.
    """
    req = request.get_json()

    status = req.get('status')
    if status not in ('on', 'off', 'delete'):
        return json.dumps({
            "success": False,
            'message': '状态不规范'
        }, ensure_ascii=False)

    task_type = req.get('task_type')
    task_name = req.get('task_name')

    def _apply(registry):
        # Shared status-transition logic for either task registry.
        if not task_name or task_name not in registry:
            return json.dumps({
                "success": False,
                'message': '任务不存在'
            }, ensure_ascii=False)
        task = registry.get(task_name)

        if status == 'delete':
            # Mark deleted so running workers observe it, then drop from the registry.
            task.status = status
            del registry[task_name]
        else:
            if task.status == 'complete':
                return json.dumps({
                    "success": False,
                    'message': '任务已结束'
                }, ensure_ascii=False)
            task.status = status
        return json.dumps({
            "success": True,
            'message': '操作成功'
        }, ensure_ascii=False)

    if task_type == 'node_task':
        return _apply(one_runtime.tasks)
    elif task_type == 'eval_task':
        return _apply(one_runtime.eval_tasks)
    else:
        return json.dumps({
            "success": False,
            'message': '未知任务类型'
        }, ensure_ascii=False)


@app.route('/delete_node_data', methods=['POST'])
def delete_node_data():
    """Delete script-node data rows and, asynchronously, their downstream rows.

    JSON body modes:
      * ``id`` given        -> delete that single row plus its direct downstream.
      * filter fields given -> bulk delete; ``data_group`` and ``script_name``
                               are mandatory, and ``pre_check: true`` only
                               reports how many rows WOULD be deleted.

    Returns a JSON string with ``success`` plus either ``data`` (row count)
    or ``message`` (validation error).
    """
    req = request.get_json()
    node = req.get('node')
    if not node:
        return json.dumps({
            "success": False,
            'message': '节点为空'
        }, ensure_ascii=False)

    row_id = req.get('id')  # renamed from `id` to stop shadowing the builtin
    if row_id:
        node_data_storage.delete_data_by_ids([row_id])
        # Delete downstream rows whose source_nodes reference this row.
        # NOTE(review): `node`/`row_id` are interpolated into SQL verbatim —
        # injection risk; this endpoint must only be reachable by trusted callers.
        dele_next_nodes = f"DELETE FROM script_node_data WHERE JSON_EXTRACT(source_nodes, '$.\"{node}\"') = {row_id}"
        node_data_storage.execute_sql(dele_next_nodes)

        # Cleanup of orphaned call records is intentionally disabled:
        # call_record_storage.clean_records()
        return json.dumps({
            "success": True,
            'data': 1
        }, ensure_ascii=False)

    data_group = req.get('data_group')
    content_type = req.get('content_type')
    script_name = req.get('script_name')
    search = req.get('search')
    time_range = req.get('time_range')
    content_filter = req.get('filter')  # renamed from `filter` (builtin shadow)
    if not data_group or not script_name:
        return json.dumps({
            "success": False,
            'message': 'data_group 和 node script_name不可为空'
        }, ensure_ascii=False)

    # "A to B" time ranges split into begin/end; a bare value is begin-only.
    begin_time = None
    end_time = None
    if time_range:
        if ' to ' in time_range:
            begin_time = time_range.split(' to ')[0]
            end_time = time_range.split(' to ')[1]
        else:
            begin_time = time_range

    filters = {
        'data_group': data_group,
        'content_type': content_type,
        'node': node,
        'search': search,
        'script_name': script_name,
        'content_not_null': None if content_filter == 'all' else content_filter == 'content_not_null',
        'begin_time': begin_time,
        'end_time': end_time
    }
    rows = node_data_storage.select_datas(filters=filters)

    # Nothing matched, or caller only wants the count (pre-check): report and stop.
    # FIX: the original passed the `bool` class as the .get() default
    # (`req.get('pre_check', bool)`); a missing key now yields None, which
    # compares unequal to True exactly as the class object did.
    if len(rows) == 0 or req.get('pre_check') == True:  # noqa: E712 — loose equality kept for backward compatibility
        return json.dumps({
            "success": True,
            'data': len(rows)
        }, ensure_ascii=False)

    node_data_storage.delete_datas(filters=filters)

    ids = [item.id for item in rows]

    def delete_ids(del_ids):
        # Downstream deletion runs in 1000-id chunks so the IN clause stays bounded.
        ids_chunks = [del_ids[i:i + 1000] for i in range(0, len(del_ids), 1000)]
        for ids_chunk in ids_chunks:
            try:
                ids_str = ','.join(map(str, ids_chunk))
                dele_next_nodes = f"DELETE FROM script_node_data WHERE JSON_EXTRACT(source_nodes, '$.\"{node}\"') in ({ids_str})"
                node_data_storage.execute_sql(dele_next_nodes)
            except Exception as e:
                print('删除下游失败', str(e))

    # Downstream cleanup is fire-and-forget so the request can return quickly.
    thread = threading.Thread(target=delete_ids, args=(ids,))
    thread.start()

    return json.dumps({
        "success": True,
        'data': len(rows)
    }, ensure_ascii=False)


@app.route('/delete_eval_data', methods=['POST'])
def delete_eval_data():
    """Delete evaluation rows, either one by ``id`` or in bulk by filters.

    Bulk mode requires eval_node, source_node, source_group, eval_name and
    script_name; ``pre_check: true`` only returns the matching row count.
    Returns a JSON string with ``success`` plus ``data``/``message``.
    """
    req = request.get_json()
    row_id = req.get('id')  # renamed from `id` to stop shadowing the builtin
    if row_id:
        eval_data_storage.delete_eval_by_ids([row_id])
        return json.dumps({
            "success": True,
            'data': 1
        }, ensure_ascii=False)

    source_node = req.get('source_node')
    source_group = req.get('source_group')
    script_name = req.get('script_name')
    eval_name = req.get('eval_name')
    search = req.get('search')
    eval_node = req.get('eval_node')
    if not eval_node:
        return json.dumps({
            "success": False,
            'message': '评测节点为空'
        }, ensure_ascii=False)
    if not source_node or not source_group or not eval_name or not script_name:
        return json.dumps({
            "success": False,
            'message': 'source_node 和 source_group 和 eval_name script_name不可为空'
        }, ensure_ascii=False)

    filters = {
        'script_name': script_name,
        'source_node': source_node,
        'eval_node': eval_node,
        'search': search,
        'source_group': source_group,
        'eval_name': eval_name
    }
    # FIX: was `req.get('pre_check', bool)` — the bool class was being used
    # as the dict default; a missing key now yields None (still != True).
    if req.get('pre_check') == True:  # noqa: E712 — loose equality kept for backward compatibility
        rows = eval_data_storage.select_eval_datas(filters=filters)
        return json.dumps({
            "success": True,
            'data': len(rows)
        }, ensure_ascii=False)

    del_num = eval_data_storage.delete_eval_datas(filters=filters)
    return json.dumps({
        "success": True,
        'data': del_num
    }, ensure_ascii=False)


@app.route('/delete_embed_data', methods=['POST'])
def delete_embed_data():
    """Delete embedding rows, either one by ``id`` or in bulk by filters.

    Bulk mode requires embed_scope and data_group; the optional ``embed``
    field ('true'/'false'/'all') narrows to embedded / not-yet-embedded rows.
    ``pre_check: true`` only returns the matching row count.
    """
    req = request.get_json()
    row_id = req.get('id')  # renamed from `id` to stop shadowing the builtin
    if row_id:
        embed_storage.delete_by_id(row_id)
        return json.dumps({
            "success": True,
            'data': 1
        }, ensure_ascii=False)

    embed_scope = req.get('embed_scope')
    data_group = req.get('data_group')
    search = req.get('search')
    embed = req.get('embed')
    if not embed_scope or not data_group:
        return json.dumps({
            "success": False,
            'message': 'embed_scope data_group不可为空'
        }, ensure_ascii=False)

    filters = {
        'embed_scope': embed_scope,
        'data_group': data_group,
        'search': search,
    }
    if embed and embed != 'all':
        filters['embed'] = embed == 'true'  # simplified from `True if ... else False`
    # FIX: was `req.get('pre_check', bool)` — the bool class was the default;
    # a missing key now yields None (still != True).
    if req.get('pre_check') == True:  # noqa: E712 — loose equality kept for backward compatibility
        rows = embed_storage.select_embeds(filters=filters)
        return json.dumps({
            "success": True,
            'data': len(rows)
        }, ensure_ascii=False)

    del_num = embed_storage.delete_embeds(filters=filters)
    return json.dumps({
        "success": True,
        'data': del_num
    }, ensure_ascii=False)


@app.route('/destroy_chroma', methods=['POST'])
def destroy_chroma():
    """Destroy the chroma collection for the given embed_scope.

    Expects JSON ``{"embed_scope": ...}``; responds with a JSON string
    carrying ``success`` and, on failure, a ``message``.
    """
    scope = request.get_json().get('embed_scope')
    if not scope:
        payload = {
            "success": False,
            'message': 'embed_scope不可为空'
        }
    else:
        try:
            embed_storage.destroy_chroma(scope)
            payload = {
                "success": True
            }
        except Exception as exc:
            payload = {
                "success": False,
                'message': '销毁失败,' + str(exc)
            }
    return json.dumps(payload, ensure_ascii=False)


@app.route('/embedding_data', methods=['POST'])
def embedding_data():
    """Trigger embedding computation, for one row by ``id`` or in bulk.

    Bulk mode requires embed_scope and data_group; ``pre_check: true`` only
    returns how many rows match. The actual bulk embedding runs in a daemon
    thread, so the response reports the matched row count, not completion.
    """
    req = request.get_json()
    row_id = req.get('id')  # renamed from `id` to stop shadowing the builtin
    if row_id:
        embed_storage.embedding_with_id(row_id)
        return json.dumps({
            "success": True,
            'data': 1
        }, ensure_ascii=False)

    embed_scope = req.get('embed_scope')
    data_group = req.get('data_group')
    search = req.get('search')
    embed = req.get('embed')
    if not embed_scope or not data_group:
        return json.dumps({
            "success": False,
            'message': 'embed_scope data_group不可为空'
        }, ensure_ascii=False)

    filters = {
        'embed_scope': embed_scope,
        'data_group': data_group,
        'search': search
    }

    if embed and embed != 'all':
        filters['embed'] = embed == 'true'

    # Select once; both the pre-check and the real run only need the count.
    rows = embed_storage.select_embeds(filters=filters)
    # FIX: was `req.get('pre_check', bool)` — the bool class was the default;
    # a missing key now yields None (still != True).
    if req.get('pre_check') == True:  # noqa: E712 — loose equality kept for backward compatibility
        return json.dumps({
            "success": True,
            'data': len(rows)
        }, ensure_ascii=False)

    # Fire-and-forget: embedding can be slow, so run it off the request thread.
    t = threading.Thread(target=lambda: embed_storage.embedding_with_filters(filters=filters), daemon=True)
    t.start()

    return json.dumps({
        "success": True,
        'data': len(rows)
    }, ensure_ascii=False)


@app.route('/sync_chroma', methods=['POST'])
def sync_chroma():
    """Sync embeddings into chroma, for one row by ``id`` or a whole group.

    Bulk mode requires embed_scope and data_group. ``pre_check: true``
    returns the count of already-embedded rows instead of syncing.
    """
    req = request.get_json()
    row_id = req.get('id')  # renamed from `id` to stop shadowing the builtin
    if row_id:
        embed_storage.sync_chroma_with_id(row_id)
        return json.dumps({
            "success": True,
            'data': 1
        }, ensure_ascii=False)

    embed_scope = req.get('embed_scope')
    data_group = req.get('data_group')
    if not embed_scope or not data_group:
        return json.dumps({
            "success": False,
            'message': 'embed_scope data_group不可为空'
        }, ensure_ascii=False)

    # FIX: was `req.get('pre_check', bool)` — the bool class was the default;
    # a missing key now yields None (still != True).
    if req.get('pre_check') == True:  # noqa: E712 — loose equality kept for backward compatibility
        filters = {
            'embed_scope': embed_scope,
            'data_group': data_group,
            'embed': True  # pre-check only counts rows that are already embedded
        }
        rows = embed_storage.select_embeds(filters=filters)
        return json.dumps({
            "success": True,
            'data': len(rows)
        }, ensure_ascii=False)

    rows = embed_storage.sync_chroma_with_data_group(embed_scope=embed_scope, data_group=data_group)
    return json.dumps({
        "success": True,
        'data': len(rows)
    }, ensure_ascii=False)


@app.route('/search_next_data', methods=['GET'])
def search_next_data():
    """Return a data row plus its direct upstream rows as a source_id tree.

    Each node dict also carries the call records that produced it (matched
    on ``req_id``) under ``records``. Responds with ``{"success", "data"}``.
    """
    data_id = request.args.get('id')  # renamed from `id` (builtin shadow)
    if not data_id:
        return json.dumps({
            "success": False,
            'message': 'id为空'
        }, ensure_ascii=False)
    data = node_data_storage.select_data_by_id(data_id)
    if not data:
        return json.dumps({
            "success": True,
            'data': []
        }, ensure_ascii=False)

    # Only this row's own upstream is wanted, not the full lineage chain.
    if not data.source_nodes:
        datas = [data]
    else:
        datas: list[ScriptNodeData] = node_data_storage.select_datas(filters={
            'ids': list(data.source_nodes.values())
        })
        datas.append(data)

    datas = [json.loads(item.to_json()) for item in datas]

    # Fetch call records for every involved row.
    all_data_ids = [item.get('id') for item in datas]
    records = call_record_storage.select_records(filters={
        'source_ids': all_data_ids
    })

    # Group records by the id of the row they were recorded against.
    records_by_source = defaultdict(list)
    for record in records:
        records_by_source[record.source_id].append(json.loads(record.to_json()))

    # Attach the records that produced each row (req_id must match).
    # FIX: the comprehension variable was named `re`, shadowing the imported
    # `re` module within this scope.
    for item in datas:
        source_id = item["source_id"]
        if source_id in records_by_source:
            item["records"] = [rec for rec in records_by_source[source_id] if item.get('req_id') == rec.get('req_id')]

    def build_tree(data):
        """Link items into a tree via source_id.

        The root is the item whose source_id is None; if several exist the
        last one wins (behavior kept from the original implementation).
        """
        tree = {}
        id_dict = {item["id"]: item for item in data}
        for item in data:
            source_id = item["source_id"]
            if source_id is None:
                tree = item
            else:
                parent = id_dict.get(source_id)
                if parent is not None:
                    parent.setdefault("children", []).append(item)
        return tree

    return json.dumps({
        "success": True,
        'data': build_tree(datas)
    }, ensure_ascii=False)


@app.route('/reload_scripts', methods=['GET'])
def refresh_code():
    """Hot-reload script code, then resize every running task's worker pool.

    Per-task failures are logged and skipped; only a failure of the reload
    itself produces a ``success: False`` response.
    """
    try:
        one_runtime.refresh_code()

        def _resolve_worker_num(task):
            # The script schema must exist regardless of how the size is chosen.
            schema = script.get_script_schemas().get(task.script_name)
            if not schema:
                raise ValueError('脚本不存在')
            # An explicit worker_num in the task source wins; otherwise fall
            # back to the node function's suggested size (minimum 1).
            explicit = task.source.get('worker_num')
            if explicit:
                return explicit
            node_func = schema['functions'].get(task.node)
            if not node_func:
                raise ValueError('节点不存在')
            suggested = node_func.get('suggest_worker_num')
            return 1 if not suggested or suggested < 1 else suggested

        for task_name, task in one_runtime.tasks.items():
            try:
                task.update_workers(_resolve_worker_num(task))
            except Exception as err:
                util.log('refresh_worker_error', {
                    'error': err,
                    'task_name': task_name,
                    'script_name': task.script_name,
                    'node': task.node
                })

        return json.dumps({
            'success': True
        })
    except Exception as err:
        return json.dumps({
            'success': False,
            'message': str(err)
        })


@app.route('/get_tasks', methods=['GET'])
def get_tasks():
    """Return all node tasks and eval tasks as plain dicts.

    The transient 'monitor' entry is stripped from each task before the
    snapshot is logged and returned.
    """
    try:
        def _snapshot(task_map):
            # Serialize each task and drop its non-serializable monitor handle.
            snap = {name: t.to_dict() for name, t in task_map.items()}
            for entry in snap.values():
                entry.pop('monitor', None)
            return snap

        tasks = _snapshot(one_runtime.tasks)
        eval_tasks = _snapshot(one_runtime.eval_tasks)
        util.log('get_tasks', {
            'tasks': [tasks, eval_tasks]
        })
        return json.dumps({
            'success': True,
            'data': [tasks, eval_tasks]
        }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        })


@app.route('/readd_tasks', methods=['POST'])
def readd_tasks():
    """Re-create tasks from a posted snapshot (the /get_tasks shape).

    Deleted tasks are skipped; per-task failures are reported in the result
    map under the task's name instead of aborting the whole request.
    """
    try:
        snapshot = request.get_json()
        print(json.dumps(snapshot, ensure_ascii=False))
        if not snapshot:
            return json.dumps({
                'success': True,
                'data': {}
            }, ensure_ascii=False)
        results = {}
        for group in snapshot:
            for name, spec in group.items():
                try:
                    if spec.get('status') == 'delete':
                        continue
                    kind = spec.get('task_type')
                    if kind == 'node_task':
                        restored = task_runner.create_node_task(name=name, script_name=spec.get('script_name'), node=spec.get('node'), data_group=spec.get('data_group'), wait=False, pre_check=False, source=spec.get('source'))
                    elif kind == 'eval_task':
                        restored = task_runner.create_eval_task(name=name, script_name=spec.get('script_name'), eval_node=spec.get('eval_node'), eval_name=spec.get('eval_name'), wait=False, pre_check=False, source=spec.get('source'))
                    else:
                        raise ValueError('未知的任务类型')
                    # Restore the recorded run status onto the fresh task.
                    restored.status = spec.get('status')
                    results[name] = restored.to_dict()
                except Exception as e:
                    results[name] = '重启异常' + str(e)
        return json.dumps({
            'success': True,
            'data': results
        }, ensure_ascii=False)
    except Exception as e:
        return json.dumps({
            'success': False,
            'message': str(e)
        }, ensure_ascii=False)


@app.route('/restart', methods=['POST'])
def restart():
    """Kick off restart.sh in the background and return the current tasks.

    The returned task snapshot lets the caller re-post it to /readd_tasks
    once the process comes back up. The script runs detached; its output is
    appended to logs/restart.log next to the script.
    """
    try:
        tasks = {key: val.to_dict() for key, val in one_runtime.tasks.items()}  # snapshot before restarting
        script_path = os.path.dirname(os.path.abspath(__file__)) + '/../restart.sh'
        log_file_path = os.path.join(os.path.dirname(script_path), 'logs/restart.log')
        # FIX: create the log directory if missing — open() used to fail here.
        os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
        with open(log_file_path, 'a') as f:
            # Fire-and-forget; the child holds its own duplicated fd, so closing
            # ours when the `with` exits is safe. (Unused `process` var removed.)
            subprocess.Popen(['sh', script_path], stdout=f, stderr=subprocess.STDOUT)
        return jsonify(success=True, data=tasks)
    except Exception as e:
        return jsonify(success=False, message=str(e))


@app.route('/restart_jupyter', methods=['GET'])
def restart_jupyter():
    """Fire-and-forget the notebook restart script; never waits for it."""
    try:
        # Resolve the restart script relative to this file's location.
        base_dir = os.path.dirname(os.path.abspath(__file__))
        script_path = base_dir + "/../../my_note/restart.sh"
        # Launch detached — the response does not depend on the script's outcome.
        subprocess.Popen(["bash", script_path])
        return jsonify(success=True)
    except Exception as e:
        return jsonify(success=False, message=str(e))


@app.route('/run_code', methods=['POST'])
def run_code():
    """Execute posted Python code and return the value bound to `key`.

    Form fields: ``code`` (source to exec) and ``key`` (name whose value in
    the exec namespace is returned). A run id tags any logs emitted during
    execution via the thread-local ``util.log_local.run_data``.

    SECURITY: this endpoint exec()s caller-supplied code — it must only be
    reachable by trusted, internal users.
    """
    # FIX: assign run_code_id before the try so the except handler can always
    # reference it (previously a failure before the assignment raised
    # NameError inside the handler, masking the real traceback).
    run_code_id = str(time.time())
    try:
        code = request.form.get('code')
        key = request.form.get('key')
        if not code or not key:
            return jsonify(success=False, message='code and key 不可为空')
        try:
            util.log_local.run_data = {
                "run_code_id": run_code_id
            }
            ctx = {}
            exec(code, ctx)
        finally:
            # Always clear the thread-local tag, even when exec raises.
            if hasattr(util.log_local, "run_data"):
                del util.log_local.run_data
        return jsonify(success=True, data=ctx.get(key), run_code_id=run_code_id)
    except Exception:  # narrowed from bare except; still returns the traceback
        return jsonify(success=False, message=traceback.format_exc(), run_code_id=run_code_id)


@app.route('/trigger_jobs', methods=['POST'])
def trigger_jobs():
    """Manually fire a single registered job in a background thread.

    Form fields: ``job_group`` (module group name) and ``job_code`` (job
    function name inside that group).
    """
    try:
        group_name = request.form.get('job_group')
        code = request.form.get('job_code')
        group = one_runtime.jobs.get(group_name)
        if not group or not code:
            return jsonify(success=False, message='module不存在， job_code 不可为空')
        job_def = group.get('jobs').get(code)
        if not job_def:
            return jsonify(success=False, message='job不存在')

        def _fire():
            # Build a synthetic context so the job runs as if scheduled.
            ctx = JobCtx(id=job_def.get('id'), weeks=[1, 2, 3, 4, 5, 6, 7], time_str='1')
            getattr(group.get('module'), code)(ctx)

        threading.Thread(target=_fire).start()
        return jsonify(success=True, data='success')
    except Exception:
        return jsonify(success=False, message=traceback.format_exc())


@app.route('/load_jobs', methods=['GET'])
def load_jobs():
    """List every registered job with a human-readable schedule description."""
    try:
        jobs = []
        all_days = {1, 2, 3, 4, 5, 6, 7}
        for job_group, val in one_runtime.jobs.items():
            # FIX: loop variable renamed from `job` — it shadowed the imported
            # `job` module inside this function.
            for job_code, job_def in val.get('jobs').items():
                # FIX: was `set(weeks).issubset(all_days)`, which is true for
                # EVERY valid weekday set, so all jobs were labelled '每天' and
                # the '每周' branch was unreachable. A job is daily only when
                # it covers all 7 days.
                week_str = '每天' if set(job_def.get('weeks')) >= all_days else ('每周' + '，'.join(str(week) for week in job_def.get('weeks')))
                time_str = '；每隔' + job_def.get('times')[0] + '分钟' if job_def.get('times')[0].isdigit() else ('；在' + '，'.join(job_def.get('times')))
                jobs.append({
                    'id': job_def.get('id'),
                    'job_group': val.get('job_group'),
                    'job_code': job_code,
                    'job_body': job_def.get('body'),
                    'job_name': job_def.get('desc'),
                    'schedule_times': week_str + time_str + '调度一次'
                })
        return jsonify(success=True, rows=jobs)
    except Exception:  # narrowed from bare except; still returns the traceback
        return jsonify(success=False, message=traceback.format_exc())


if __name__ == '__main__':
    # Prompt-script subsystem: only bootstrapped when explicitly enabled.
    if one_runtime.get_config("ENABLE_PROMPTS","false") == 'true':
        try:
            script.refresh_scripts()
        except Exception as e:
            util.log('refresh_scripts_error', {
                'error': e
            })
        # Periodically refresh the evaluation datasets.
        # NOTE(review): start_refresh_data_periodically is defined elsewhere
        # in this file, outside this chunk — confirm it exists before startup.
        start_refresh_data_periodically()

    try:
        job.refresh_jobs()
    except Exception as e:
        util.log('refresh_jobs_error', {
            'error': e
        })

    # Start the job-trigger loop in a background thread.
    # (Module-level access, so the double-underscore name is not mangled.)
    Thread(target=job.__open_job_trigger).start()
    # Only the designated server machine actually schedules jobs.
    if one_runtime.get_config('serverip') == util.get_ip():
        job.start_trigger()
    # Non-server machines run the web app with Flask debug mode enabled.
    if util.get_ip() != one_runtime.get_config('serverip'):
        app.debug = True  # enable debug mode
        pass

    app.run(host='0.0.0.0', port=5678, threaded=True)
