# api_5103.py
# Create by GF 2025-05-10 23:51

# Python 3 Standard Libraries.
import io
import os
import random
# ..................................................
import flask
# ..................................................
# MySQL Database AI Agent Required Libraries.
import pandas
import pymysql
import sqlalchemy
# ..................................................
import proxy_openai_api_202505

# ##################################################

# Base URL of the upstream OpenAI-compatible API that this service proxies.
SIMILAR_TO_OPENAI_API_BASE_URL    = "http://172.16.176.33:3001/v1"

# MySQL connection settings used to execute model-generated SQL.
# SECURITY NOTE(review): credentials are hard-coded in source; move them to
# environment variables or a config file before deploying.
MYSQL_DB_HOST     = '172.16.82.4'
MYSQL_DB_PORT     = '3336'
MYSQL_DB_NAME     = 'working'
MYSQL_DB_USERNAME = 'goufeng'
MYSQL_DB_PASSWORD = 'abcd1234'

# ##################################################

# Database schema description sent to the LLM as a priming message.
# Fix: the original text was invalid JSON — it was missing the comma after
# the "数据库类型" entry, which could confuse the model (and any parser).
INFORMATION = """
{
  "数据库类型": "mysql",
  "数据库名称": "working",
  "数据表信息": [
    {"数据表名称": "view_用户信息表", "数据表字段": ["用户名称", "用户代码", "部门名称", "部门简称"]}
  ]
}
"""

# ##################################################

def Text_Opr_Content_in_Mark_Extract(Text:str, Mark_Start:str, Mark_End:str, With_Mark:int=1) -> str:
    """Extract the first substring of Text delimited by Mark_Start / Mark_End.

    Args:
        Text:       The text to search.
        Mark_Start: Opening delimiter (e.g. "```sql\\n").
        Mark_End:   Closing delimiter (e.g. "\\n```"), searched for only
                    after the end of the opening delimiter.
        With_Mark:  1 -> include both delimiters in the result;
                    0 -> return only the content between them.

    Returns:
        The matched span, or '' when either delimiter is missing.

    >>> Text = "Here is The Code:\\n\\n<CODE_START>\\nDESCRIBE example_table;\\n<CODE_END>"
    >>> Text_Opr_Content_in_Mark_Extract(Text, "<CODE_START>\\n", "<CODE_END>", 1)
    '<CODE_START>\\nDESCRIBE example_table;\\n<CODE_END>'
    >>> Text_Opr_Content_in_Mark_Extract(Text, "<CODE_START>\\n", "<CODE_END>", 0)
    'DESCRIBE example_table;\\n'
    """
    # str.find replaces the original character-by-character scanning loops.
    start = Text.find(Mark_Start)
    if start == -1:  # Opening mark not found -> empty result.
        return ''
    # ..............................................
    end = Text.find(Mark_End, start + len(Mark_Start))
    if end == -1:
        # Bug fix: the original left the end index at 0 when only the closing
        # mark was missing, so the With_Mark=0 path sliced with a negative
        # end and returned stray text. Treat a missing closer like a missing
        # opener and return the empty string.
        return ''
    end = end + len(Mark_End)
    # ..............................................
    if (With_Mark == 0):
        # Strip the delimiters, keeping only the enclosed content.
        return Text[(start + len(Mark_Start)):(end - len(Mark_End))]
    return Text[start:end]

# ##################################################

app = flask.Flask(__name__)

# ##################################################

@app.route("/v1", methods=["GET"])
def api_proxy_openai_v1():
    """API root probe endpoint: responds 200 with an empty body."""
    return ('', 200)

@app.route("/v1/favicon.ico", methods=["GET"])
def api_proxy_openai_v1_favicon_ico():
    """Favicon probe endpoint: responds 200 with an empty body."""
    return ('', 200)

@app.route("/v1/models", methods=["GET"])
def api_proxy_openai_v1_models():
    """Proxy GET /v1/models upstream, then append the custom agent model.

    Requires an "Authorization" header (forwarded verbatim upstream);
    returns 401 when it is missing. Returns the upstream model list with
    "assistant-database" appended, as JSON with status 200.
    """

    received_api_key = flask.request.headers.get("Authorization", '')
    # ..............................................
    if (received_api_key == ''):
        return flask.jsonify({"error": "missing api key"}), 401
    # ..............................................
    this_proxy_openai_api = proxy_openai_api_202505.proxy_openai_api()
    this_proxy_openai_api.url.update(f"{SIMILAR_TO_OPENAI_API_BASE_URL}/models")
    # Consistency fix: the original chained headers.update(...).update(...),
    # which relies on update() returning the headers object (a dict-style
    # update() returns None and would raise AttributeError). Use separate
    # calls, matching the style used in /v1/chat/completions.
    this_proxy_openai_api.headers.update({"Authorization": received_api_key})
    this_proxy_openai_api.headers.update({"Content-Type": "application/json"})
    # ..............................................
    rps_dict = this_proxy_openai_api.get()
    # ..............................................
    try:
        # Advertise the custom database-agent model alongside the upstream list.
        rps_dict["data"].append({"id":"assistant-database", "object":"model", "created":1744009868, "owned_by":"system"})
    except Exception as e:
        # Best-effort: if the upstream response has no "data" list, log and
        # return the response as received.
        print("[DEBUG] error:", str(e))
    # ..............................................
    return flask.jsonify(rps_dict), 200

@app.route("/v1/chat/completions", methods=["POST"])
def api_proxy_openai_v1_chat_completions():

    # Chat-completions proxy with a SQL-agent "middleware" layer:
    #   1. prime the LLM with the database schema (INFORMATION) via seeded
    #      user/assistant message pairs,
    #   2. forward the caller's messages and obtain the model's full reply
    #      (the stream is materialized first so it can be inspected),
    #   3. if the reply contains a ```sql fenced statement, execute it against
    #      MySQL and ask the model a second time to present the result,
    #   4. otherwise (or if execution fails) return the first reply unchanged.
    #
    # Example:
    # - curl -H "Content-Type: application/json" \
    # -      -H "Authorization: Bearer please-enter-openai-api-key" \
    # -      -X POST \
    # -      -d '{"messages": [{"role": "user", "content": "Can you here me?"}]}' \
    # -      http://127.0.0.1:5001/v1/chat/completions
    # Returns:
    # - {'code': 0,
    # -  'message': 'Success',
    # -  'sid': 'cha000bc275@dx196bad449ab9a4b532',
    # -  'choices': [{'message': {'role': 'assistant', 'content': 'Can I help you?'},
    # -               'index': 0}],
    # -  'usage': {'prompt_tokens': 5, 'completion_tokens': 5, 'total_tokens': 10}}

    received_api_key = flask.request.headers.get("Authorization", '')
    # ..............................................
    if (received_api_key == ''):
        return flask.jsonify({"error": "missing api key"}), 401
    # ..............................................
    received_model    = flask.request.json.get("model",    '')
    received_messages = flask.request.json.get("messages", [])
    received_stream   = flask.request.json.get("stream",   False)
    # ..............................................
    # Optional overrides (commented out): ignore "Authorization" / "model"
    # from the JSON body to force a specific backend model.
#   received_api_key = "Bearer NXuRvfuyBjfsOHqtAyRI:NXuRvfuyBjfsOHqtAyRI"
#   received_model   = "4.0Ultra"
    # ..............................................
    try:
        this_proxy_openai_api = proxy_openai_api_202505.proxy_openai_api()
        this_proxy_openai_api.url.update(f"{SIMILAR_TO_OPENAI_API_BASE_URL}/chat/completions")
        this_proxy_openai_api.headers.update({"Authorization": received_api_key})
        this_proxy_openai_api.headers.update({"Content-Type": "application/json"})
        this_proxy_openai_api.json.update({"model":  received_model})
        this_proxy_openai_api.json.update({"stream": received_stream})
        # ..........................................
        history_messages:list = []
        # ..........................................
        # Seed the conversation: scenario, database schema, SQL-generation
        # rules, and the requirement that SQL be wrapped in ```sql\n``` fences
        # (so the middleware below can detect and extract it).
        history_messages.append({"role": "user",      "content": "[SYSTEM]\n您正在帮助用户代理 SQL 数据库"})
        history_messages.append({"role": "assistant", "content": "好的, 我将基于 \"帮助用户代理 SQL 数据库\" 这个场景完成任务"})
        history_messages.append({"role": "user",      "content": "[SYSTEM]\n以下是数据库的基本信息:\n::INFORMATION::".replace("::INFORMATION::", INFORMATION)})
        history_messages.append({"role": "assistant", "content": "好的, 我将严格基于提供的信息完成任务"})
        history_messages.append({"role": "user",      "content": "[SYSTEM]\n您将根据用户的意图给出适用的 SQL 语句, 如果 MIDDLEWARE (中间件) 探测到您的输出包含 SQL 语句, 将会协助您执行 SQL 语句"})
        history_messages.append({"role": "assistant", "content": "好的, 我将生成适用的 SQL 语句, 以便用户参考, 以及协助 MIDDLEWARE (中间件) 执行 SQL 语句"})
        history_messages.append({"role": "user",      "content": "[SYSTEM]\n如果 SQL 语句涉及表名, 严格使用用户提供的表名, 不可捏造表名"})  # optional, may be removed
        history_messages.append({"role": "assistant", "content": "好的, 我将基于提供的信息生成适用的 SQL 语句"})                            # optional, may be removed
        history_messages.append({"role": "user",      "content": "[SYSTEM]\n生成的 SQL 语句需要放在 ```sql\n``` 中"})
        history_messages.append({"role": "assistant", "content": "好的, 我会将生成的 SQL 语句放在 ```sql\n``` 中"})
        # ..........................................
        # Append the caller's messages, one by one, after the seeded history.
        for message in received_messages:
            history_messages.append(message)
        # ..........................................
        this_proxy_openai_api.json.update({"messages": history_messages})
        # ..........................................
        # Obtain the model's complete reply first (even in streaming mode) so
        # we can check whether it contains a SQL statement before responding.
        if (received_stream == True):
            rps_iter       = this_proxy_openai_api.post()
            rps_iter_cache = list(rps_iter)  # materialize iter_content() chunks so they can be consumed more than once
            rps_dict       = this_proxy_openai_api.integrate_iter_content(rps_iter_cache)
            rps_asis_ctnt  = rps_dict["choices"][-1]["delta"]["content"]
        else:
            rps_dict       = this_proxy_openai_api.post()
            rps_asis_ctnt  = rps_dict["choices"][-1]["message"]["content"]
        # ..........................................
        if ("```sql" in rps_asis_ctnt):
            # The LLM reply contains a SQL statement wrapped in ```sql\n``` fences.
            sql_statment      = Text_Opr_Content_in_Mark_Extract(rps_asis_ctnt, "```sql\n", "\n```", With_Mark=0)
            sucess_or_failure = "null"
            # ......................................
            # NOTE(review): the extracted SQL is executed verbatim against the
            # database — the LLM output is effectively trusted input here.
            try:
                engine            = sqlalchemy.create_engine(f"mysql+pymysql://{MYSQL_DB_USERNAME}:{MYSQL_DB_PASSWORD}@{MYSQL_DB_HOST}:{MYSQL_DB_PORT}/{MYSQL_DB_NAME}")
                query_result      = pandas.read_sql_query(sql_statment, con = engine)
                query_result_json = query_result.to_json(orient="records", force_ascii=False)
                sucess_or_failure = "sucess"
            except Exception as e:
                print(e)
                sucess_or_failure = "failure"
            # ......................................
            if (sucess_or_failure == "sucess"):
                # The SQL executed successfully: feed the query result back to
                # the model and return its second reply to the caller.
                history_messages.append({"role": "user",      "content": "[MIDDLEWARE]\n我的 SQL 语句执行结果中如果包含 JSON, 请以 | column_1 | column_2 | ... | column_n | (Markdown 表格) 形式响应给用户"})
                history_messages.append({"role": "assistant", "content": "好的, 如果你的的 SQL 语句执行结果中包含 JSON, 我将以以 | column_1 | column_2 | ... | column_n | (Markdown 表格) 形式响应给用户"})
                history_messages.append({"role": "user",      "content": "[MIDDLEWARE]\n这是 SQL 的执行结果\n::QUERY_RESULT::".replace("::QUERY_RESULT::", query_result_json)})
                # ..................................
                this_proxy_openai_api.json.update({"messages": history_messages})
                # ..................................
                # SQL executed successfully: return the second-round response.
                if (received_stream == True):
                    rps_iter = this_proxy_openai_api.post()
                    # Streaming response handler.
                    def generate():
                        for chunk in rps_iter:
                            if chunk:
                                yield chunk
                    # ..............................
                    # OpenAI-style streaming typically uses Server-Sent Events
                    # (SSE), i.e. text/event-stream: each chunk starts with
                    # "data: " followed by JSON and ends with two newlines.
                    return flask.Response(generate(), content_type="text/event-stream")
                else:
                    rps_dict = this_proxy_openai_api.post()
                    # ..............................
                    return flask.jsonify(rps_dict), 200
        # ..........................................
        # No SQL statement in the reply (or execution failed above): return
        # the first-round reply unchanged.
        if (received_stream == True):
            # Streaming response handler.
            def generate():
                for chunk in rps_iter_cache:  # reuse the cached iter_content() chunks
                    if chunk:
                        yield chunk
            # ..............................
            return flask.Response(generate(), content_type="text/event-stream")
        else:
            return flask.jsonify(rps_dict), 200
    except Exception as e:
        print("[DEBUG] error:", str(e))
        return flask.jsonify({"error": str(e)}), 500

if __name__ == '__main__':

    # Bind on all interfaces; keep debug disabled in normal operation.
#   app.run(host="0.0.0.0", port=5103, debug=True)  # debug mode
    app.run(host="0.0.0.0", port=5103)

# EOF Signed by GF.
