import json

from flask import Response
from langchain_core.messages import AIMessage, HumanMessage

from owl_ai.domain.agent_config_entity import AgentConfigEntity
from owl_ai.domain.chat_entity import ChatMessage
from owl_ai.service import ChatFilesService
from owl_ai.service.chat_messages_service import ChatMessageService, ChatLLMService, ChatService
from owl_ai.service.graph.app_graph_compile import AppWorkFlowCompile
from owl_common.base.model import AjaxResponse
from owl_common.descriptor.serializer import JsonSerializer
from owl_common.descriptor.validator import BodyValidator
from .dto import ChatMessageDTO, ChatDTO
from ... import reg


@reg.api.route("/ai/chat_stream", methods=["POST"])
@BodyValidator()
@JsonSerializer()
def chat_stream(dto: ChatMessageDTO):
    """Persist the incoming user message, stream the LLM reply via
    Server-Sent Events, then persist the assistant reply when done.

    :param dto: validated request body carrying the chat id, message
        content and any attached file ids.
    :return: a streaming ``text/event-stream`` Response whose events are
        JSON chunks of the form ``{"type": "ai_message_chunk", "content": ...}``.
    """
    # Persist the user's message first so it is part of the history we load.
    chat_message = dto.toMessageEntity()
    chat_message.role = 'user'
    ChatMessageService.insert_message(chat_message)

    # Load the full history for this chat and convert to LangChain messages.
    history_messages = ChatMessageService.find_messages_by_chat_id(chat_message)
    chat_messages = []
    for message in history_messages:
        if message.role == 'assistant':
            chat_messages.append(AIMessage(content=message.content))
        elif message.role == 'user':
            # Inline the text of attached docx files before the user's own
            # text so the model sees the document context first.
            contents = []
            # NOTE(review): assumes message.files is always iterable and
            # find_file_by_id never returns None — confirm in the services.
            for file_id in message.files:
                chat_file = ChatFilesService.find_file_by_id(file_id)
                if chat_file.suffix == 'docx':
                    contents.append({
                        'type': 'text',
                        'text': ChatFilesService.read_file_content(chat_file)
                    })
            contents.append({
                'type': 'text',
                'text': message.content
            })
            chat_messages.append(HumanMessage(content=contents))

    # Start the streaming LLM call.
    messages = ChatLLMService.chat_stream(dto.chat_id, chat_messages=chat_messages)

    def generate():
        # The generator body runs after the request context is gone, so push
        # an application context for the DB insert done at the end.
        with reg.app.app_context():
            ai_message = ""
            for stream_message in messages:
                chunk = stream_message[0]
                if chunk.content:
                    json_data = {
                        'type': "ai_message_chunk",
                        'content': chunk.content
                    }
                    ai_message += chunk.content
                    # SSE framing: each event is a single "data:" line
                    # followed by a blank line.
                    yield f'data: {json.dumps(json_data, ensure_ascii=False)}\n\n'

            # Persist the complete assistant reply once streaming finishes.
            # NOTE(review): deriving the id as user-message id + 1 is
            # race-prone under concurrent writers — consider letting the
            # database generate it.
            ai_chat_message = ChatMessage()
            ai_chat_message.id = chat_message.id + 1
            ai_chat_message.chat_id = chat_message.chat_id
            ai_chat_message.role = 'assistant'
            ai_chat_message.content = ai_message

            ChatMessageService.insert_message(ai_chat_message)

    return Response(generate(), mimetype='text/event-stream')


@reg.api.route("/ai/chat_new", methods=["POST"])
@BodyValidator()
@JsonSerializer()
def chat_new(dto: ChatDTO):
    """Create a new chat record and return it with its assigned id."""
    new_chat = dto.toAIChat()
    # Stamp creator/updater audit fields before persisting.
    new_chat.create_by_user(name=1)
    new_chat.update_by_user(name=1)
    new_chat.id = ChatService.insert_chat(new_chat)
    return AjaxResponse.from_success(msg="操作成功", data=new_chat)


@reg.api.route("/ai/chat/find/list", methods=["POST"])
@BodyValidator()
@JsonSerializer()
def chat(dto: ChatDTO):
    """Return the list of chats matching the filter in the request body."""
    query_entity = dto.toAIChat()
    chat_list = ChatService.find_chat_list(query_entity)
    return AjaxResponse.from_success(msg="操作成功", data=chat_list)


# BUGFIX: route rule must start with a leading slash — Flask raises
# "ValueError: urls must start with a leading slash" otherwise, which
# breaks registration of this endpoint at import time.
@reg.api.route("/ai/chat/app/flow", methods=["GET"])
@JsonSerializer()
def work_flow_chat():
    """Demo endpoint: compile a fixed two-node sequential LLM workflow
    (writer -> proofreader) and invoke it once with a hard-coded prompt.

    :return: AjaxResponse wrapping the workflow invocation result.
    """
    agent_config = AgentConfigEntity()
    # NOTE(review): the LLM host/model below are hard-coded for the demo —
    # move them to application configuration before production use.
    agent_config.config = {
        "__start__": {
            "edges": {
                "type": "normal",
                "outNode": "node_1"
            }
        },
        "node_1": {
            "llm": {
                "type": "ollama",
                "url": "http://192.168.21.9:11434/v1",
                "modelName": "deepseek-r1:14b",
                "params": {
                    "num_ctx": 16384,
                    "num_predict": 16384
                }
            },
            "systemPrompt": "你是一个小说家，根据用户主题为用户编写小说内容",
            "edges": {
                "type": "normal",
                "outNode": "node_2"
            }
        },
        "node_2": {
            "llm": {
                "type": "ollama",
                "url": "http://192.168.21.9:11434/v1",
                "modelName": "deepseek-r1:14b",
                "params": {
                    "num_ctx": 16384,
                    "num_predict": 16384
                }
            },
            "systemPrompt": "你是一个小说校对员，你需要校对小说内容逻辑是否合理，并给出修正后的内容",
            "edges": {
                "type": "normal",
                "outNode": "__end__"
            }
        }
    }

    # Compile the graph from the config, then run it with a single user turn.
    work_flow = AppWorkFlowCompile.compile(config=agent_config)
    ret = work_flow.invoke(input={
        "messages": [
            HumanMessage("以中国川菜为主题，写一篇关于美食的短篇小说，限定50字")
        ]
    })
    return AjaxResponse.from_success(data=ret)

