# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import os
from typing import Annotated, Literal, Sequence, TypedDict

import aiohttp
import asyncio
import asyncpg
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.prompts import PromptTemplate
from langgraph.graph import StateGraph
from langgraph.graph.message import add_messages
from langgraph.managed import IsLastStep

from ..base_agent import BaseAgent


def setup_chat_model(args):
    """Build a ChatOpenAI client for the configured LLM serving engine.

    Args:
        args: namespace providing ``temperature``, ``top_p``, ``stream``,
            ``llm_engine`` ("vllm", "tgi" or "openai"), ``model``,
            ``timeout`` and, for vllm/tgi, ``llm_endpoint_url``.

    Returns:
        A configured ``langchain_openai.ChatOpenAI`` instance.

    Raises:
        ValueError: if ``args.llm_engine`` is not a supported engine.
    """
    from langchain_openai import ChatOpenAI

    params = {
        "temperature": args.temperature,
        "top_p": args.top_p,
        "streaming": args.stream,
    }
    if args.llm_engine in ("vllm", "tgi"):
        # vLLM/TGI expose an OpenAI-compatible API under /v1; the key is
        # unused by these servers but the client requires a non-empty value.
        return ChatOpenAI(
            openai_api_key="EMPTY",
            openai_api_base=f"{args.llm_endpoint_url}/v1",
            model_name=args.model,
            request_timeout=args.timeout,
            **params,
        )
    if args.llm_engine == "openai":
        # Hosted OpenAI: credentials come from the environment.
        return ChatOpenAI(model_name=args.model, request_timeout=args.timeout, **params)
    raise ValueError("llm_engine must be vllm, tgi or openai")


def get_standard_name(task_type):
    """Map a task type to its full standard name.

    "iso" (case-insensitive) maps to ISO9001:2015; any other value maps
    to IATF16949:2016.
    """
    names = {"iso": "ISO9001:2015"}
    return names.get(task_type.lower(), "IATF16949:2016")


EMBEDDING_ENDPOINT_URL = os.getenv("EMBEDDING_ENDPOINT_URL")


class ChunkRetrieveNode:
    """LangGraph node: fetch the top-k most relevant chunks for a file.

    Embeds a fixed query describing the section headers expected in audit
    documents, then runs a pgvector cosine-distance (``<=>``) search against
    the ``iaudit`` table, filtered to the given filename.
    """

    def __init__(self, args):
        # args must provide ``db_path`` (an asyncpg-compatible DSN).
        self.args = args

    async def retrieval(self, file_name, k=5):
        """Return up to *k* chunk strings for *file_name*, most similar first.

        Args:
            file_name: value matched against the ``filename`` column.
            k: maximum number of chunks to return.
        """
        # Embed the fixed section-header query via the external service.
        async with aiohttp.ClientSession() as session:
            # Use the response as a context manager so the connection is
            # released deterministically (previously it was left to GC).
            async with session.post(
                EMBEDDING_ENDPOINT_URL,
                json={
                    "input": "TITLE, PURPOSE, OBJECTIVES, SCOPE, ROLES AND RESPONSIBILITIES, AUDIT PROCESS, etc."
                },
            ) as response:
                payload = await response.json()
        embedding = payload["data"][0]["embedding"]

        conn = await asyncpg.connect(self.args.db_path)
        try:
            pg_table = "iaudit"
            # pgvector expects a "[v1,v2,...]" literal for the ::vector cast.
            embedding_vector_str = "[" + ",".join(map(str, embedding)) + "]"
            async with conn.transaction():
                results = await conn.fetch(
                    f"""
                        SELECT chunk, filename, embedding <=> $1::vector AS similarity
                        FROM {pg_table}
                        WHERE filename = $2
                        ORDER BY similarity ASC
                        LIMIT $3
                    """,
                    embedding_vector_str,
                    file_name,
                    k,
                )
        finally:
            # Close even if the query raises (previously leaked on error).
            await conn.close()

        return [result["chunk"] for result in results]

    def __call__(self, state):
        """Node entry point: read ``state["file_name"]``, return joined chunks."""
        print(f"---CALL {self.__class__.__name__} node---")

        print("State: ", state)

        # NOTE(review): asyncio.run() fails if an event loop is already
        # running in this thread; assumes the graph invokes this node
        # synchronously (off the loop) — confirm against the runner.
        chunks = asyncio.run(self.retrieval(state["file_name"]))

        return {"chunks": "\n".join(chunks)}


class ChecklistNode:
    """LangGraph node: generate an audit checklist from retrieved chunks."""

    def __init__(self, args):
        from .prompt import CHECKLIST_PROMPT

        # Prompt piped into the configured chat model.
        self.chain = (
            PromptTemplate(
                template=CHECKLIST_PROMPT,
                input_variables=["standard_name", "input"],
            )
            | setup_chat_model(args)
        )

    def __call__(self, state):
        """Invoke the chain on the retrieved chunks; append the AI message."""
        print(f"---CALL {self.__class__.__name__} node---")

        chain_input = {
            "standard_name": get_standard_name(state["task_type"]),
            "input": state["chunks"],
        }

        print("Invoke input: ", chain_input)
        result = self.chain.invoke(chain_input)
        print("@@@ Output from chain: ", result)

        return {"messages": [result]}


class ReportNode:
    """LangGraph node: turn the generated checklist into a compliance report."""

    def __init__(self, args):
        from .prompt import REPORT_PROMPT

        template = PromptTemplate(
            template=REPORT_PROMPT,
            input_variables=["standard_name", "input"],
        )
        # Prompt piped into the configured chat model.
        self.chain = template | setup_chat_model(args)

    def __call__(self, state):
        """Invoke the chain on the latest message (the checklist) in state."""
        print(f"---CALL {self.__class__.__name__} node---")

        standard = get_standard_name(state["task_type"])
        latest_message = state["messages"][-1]
        chain_input = {"standard_name": standard, "input": latest_message.content}

        print("Invoke input: ", chain_input)
        response = self.chain.invoke(chain_input)
        print("@@@ Output from chain: ", response)

        return {"messages": [response]}


class AgentState(TypedDict):
    """Shared state flowing through the audit workflow graph."""

    # Message history; add_messages appends new messages rather than replacing.
    messages: Annotated[Sequence[BaseMessage], add_messages]
    # Managed value set by langgraph when the step budget is nearly exhausted.
    is_last_step: IsLastStep
    # Filename used to filter the pgvector search in ChunkRetrieveNode.
    file_name: str
    # Standard selector: "ISO" -> ISO9001:2015, otherwise IATF16949:2016.
    task_type: Literal["ISO", "IATF"]
    # Newline-joined document chunks produced by ChunkRetrieveNode.
    chunks: str


class AuditAgent(BaseAgent):
    """Linear three-step audit workflow.

    Graph: chunk_retrieve_node -> checklist_node -> report_node.
    """

    def __init__(self, args, with_memory=False, **kwargs):
        # ``with_memory`` is accepted for interface compatibility but unused.
        super().__init__(args, local_vars=globals(), **kwargs)

        workflow = StateGraph(AgentState)
        workflow.add_node("chunk_retrieve_node", ChunkRetrieveNode(args=args))
        workflow.add_node("checklist_node", ChecklistNode(args=args))
        workflow.add_node("report_node", ReportNode(args=args))

        workflow.set_entry_point("chunk_retrieve_node")
        workflow.set_finish_point("report_node")

        workflow.add_edge("chunk_retrieve_node", "checklist_node")
        workflow.add_edge("checklist_node", "report_node")

        self.app = workflow.compile()

    def prepare_initial_state(self, query):
        """Build the initial graph state from ``"<task_type>,<file_name>"``.

        Splits on the FIRST comma only so file names containing commas are
        preserved (a plain split(',') truncated them).

        Raises:
            ValueError: if the query contains no comma.
        """
        parts = query.strip().split(",", 1)
        if len(parts) != 2:
            raise ValueError(
                f"query must look like '<task_type>,<file_name>', got: {query!r}"
            )
        task_type, file_name = parts
        return {
            "messages": [HumanMessage(content=f"Dummy message '{query}'")],
            "task_type": task_type,
            "file_name": file_name,
        }

    async def stream_generator(self, query, config, thread_id=None):
        """Yield SSE-framed progress banners and token chunks for *query*.

        ``thread_id`` is accepted for interface compatibility but unused.
        On error the exception text is yielded as the final chunk.
        """
        initial_state = self.prepare_initial_state(query)

        # Human-readable banner emitted when each node finishes.
        node_banners = {
            "chunk_retrieve_node": "\nHello! I have retrieved the most relevant sections from the file. And I plan to do this in two steps: \n1. Generate checklist based on the document content. \n2. Generate compliance report based on the generated checklist.\n\n **Step 1: Generate checklist based on the document content.**\n\n",
            "checklist_node": "\n\n**Step 2: Generate compliance report based on the generated checklist**\n\n",
            "report_node": "",
        }

        try:
            async for event, value in self.app.astream(
                initial_state, config=config, stream_mode=["messages", "updates"]
            ):
                if event == "messages":
                    msg, _meta_data = value
                    yield f"data: {repr(msg.content.encode('utf-8'))}\n\n"
                elif event == "updates":
                    for node_name in value:
                        banner = node_banners.get(node_name, "\n")
                        yield f"data: {repr(banner.encode('utf-8'))}\n\n"

            yield "data: [DONE]\n\n"
        except Exception as e:
            yield str(e)

    async def non_streaming_run(self, query, config):
        """Run the workflow to completion and return the final message content.

        Returns the exception text instead of raising on failure.
        """
        initial_state = self.prepare_initial_state(query)
        if "tool_choice" in config:
            initial_state["tool_choice"] = config.pop("tool_choice")
        try:
            # Track the last state explicitly: the original referenced the
            # loop variable after the loop, which is unbound (NameError) if
            # the stream yields nothing.
            final_state = None
            async for final_state in self.app.astream(
                initial_state, config=config, stream_mode="values"
            ):
                message = final_state["messages"][-1]
                if isinstance(message, tuple):
                    print(message)
                else:
                    message.pretty_print()

            if final_state is None:
                return "No response generated."
            last_message = final_state["messages"][-1]
            print("******Response: ", last_message.content)
            return last_message.content
        except Exception as e:
            return str(e)
