import json

from flask import Response, request
from langchain_core.messages import AIMessageChunk

from owl_admin import reg
from owl_ai.application.chat_llm_app_service import ChatLLMAppService, ChatAgentService, WorkFlowService
from owl_ai.application.work_flow_app_service import WorkFlowAppService
from owl_ai.domain.chat_entity import ChatMessage
from owl_ai.service.chat_messages_service import ChatMessageService
from owl_common.descriptor.serializer import JsonSerializer
from owl_common.descriptor.validator import BodyValidator


def save_ai_message(ai_chat_message):
    """Persist an assistant ChatMessage via the chat-message service.

    Thin wrapper kept as a seam so persistence can be stubbed/extended
    without touching the streaming generator that calls it.
    """
    ChatMessageService.insert_message(ai_chat_message)


def _append_ai_text(ai_message_content, node_name, content):
    """Accumulate streamed text, coalescing consecutive chunks from one node.

    Mirrors the original accumulation rules exactly:
    - an empty accumulator always receives a new entry (even for empty
      content — NOTE(review): possibly intentional placeholder behavior,
      preserved as-is);
    - a non-empty chunk is merged into the last entry when that entry is an
      'ai_message' from the same node, otherwise a new entry is started;
    - empty chunks after the first entry are dropped.
    """
    if not ai_message_content:
        ai_message_content.append({
            "type": 'ai_message',
            "nodeName": node_name,
            "content": content
        })
    elif content:
        last_entry = ai_message_content[-1]
        if last_entry.get("nodeName") == node_name and last_entry.get('type') == 'ai_message':
            last_entry["content"] += content
        else:
            ai_message_content.append({
                "type": 'ai_message',
                "nodeName": node_name,
                "content": content
            })


def generate(chunks, chat_message: ChatMessage):
    """Yield SSE `data:` frames for each AI chunk, then persist the full reply.

    :param chunks: iterable of (chunk, metadata) pairs streamed from the
        LLM/agent/workflow service; only AIMessageChunk items are emitted.
    :param chat_message: the incoming user message; the saved assistant
        message reuses its chat_id and takes id + 1.

    Runs inside the Flask app context because persistence happens after the
    response generator is consumed, outside the request context.
    """
    with reg.app.app_context():
        ai_message_content = []
        for chunk, metadata in chunks:
            if not isinstance(chunk, AIMessageChunk):
                continue

            node_name = metadata.get('langgraph_node')
            if chunk.tool_calls:
                json_data = {
                    'type': "tool_call",
                    'nodeName': node_name,
                    'tool_name': chunk.tool_calls[0]['name'],
                    'tool_calls': chunk.tool_calls
                }
                # Tool calls are recorded verbatim, one entry per chunk.
                ai_message_content.append(json_data)
            else:
                json_data = {
                    'type': "ai_message",
                    'nodeName': node_name,
                    'content': chunk.content
                }
                _append_ai_text(ai_message_content, node_name, chunk.content)

            yield f'data: {json.dumps(json_data, ensure_ascii=False)}\n\n'

        # Build and persist the assistant-side message once streaming ends.
        ai_chat_message = ChatMessage()
        ai_chat_message.id = chat_message.id + 1  # NOTE(review): assumes ids are sequential — verify against ChatMessage storage
        ai_chat_message.chat_id = chat_message.chat_id
        ai_chat_message.role = 'assistant'
        ai_chat_message.content = {
            'content': ai_message_content
        }

        save_ai_message(ai_chat_message)


@reg.api.route('/ai/app/chat_llm', methods=['POST'])
@BodyValidator()
@JsonSerializer()
def chat_llm(dto: ChatMessage):
    """Stream a plain-LLM chat reply for *dto* as a server-sent-event response."""
    stream = ChatLLMAppService.chat_llm(dto)
    sse_body = generate(stream, dto)
    return Response(sse_body, mimetype='text/event-stream')


@reg.api.route('/ai/app/chat_agent', methods=['POST'])
@BodyValidator()
@JsonSerializer()
def chat_agent(dto: ChatMessage):
    """Stream an agent-driven chat reply for *dto* as a server-sent-event response."""
    return Response(
        generate(ChatAgentService.chat_agent(dto), dto),
        mimetype='text/event-stream',
    )


@reg.api.route('/ai/app/work_flow', methods=['POST'])
@BodyValidator()
@JsonSerializer()
def work_flow(dto: ChatMessage):
    """Run the workflow app service on *dto* and stream its output as SSE."""
    workflow_stream = WorkFlowAppService.invoke(dto)
    return Response(generate(workflow_stream, dto), mimetype='text/event-stream')
