import streamlit as st
import re
from langchain_core.messages import ToolMessage, HumanMessage

def render_ai_message(message, place_holder, chunk_call_back=None):
    """Render one typed assistant message part into a Streamlit placeholder.

    Args:
        message: dict with at least ``"type"`` and ``"content"`` keys. Known
            types: ``error``, ``chunk``, ``tool_call_start``, ``tool_call_end``,
            ``artifact`` (a plotly figure), ``usage_metadata``.
        place_holder: Streamlit container/placeholder to draw into.
        chunk_call_back: optional ``callable(content, place_holder)`` used to
            render ``chunk`` parts instead of a plain markdown write.

    Raises:
        ValueError: for any unrecognized message type.
    """
    content = message["content"]
    content_type = message["type"]
    if content_type == "error":
        st.error(content)
        return
    if content_type == "chunk":
        if chunk_call_back is None:
            place_holder.markdown(content)
        else:
            chunk_call_back(content, place_holder)
    elif content_type == "tool_call_start":
        with place_holder.expander("Tool Call Input", expanded=False, icon="🔨"):
            st.markdown(content)
    elif content_type == "tool_call_end":
        with place_holder.expander("Tool Call Output", expanded=False, icon="📍"):
            st.markdown(content)
    elif content_type == "artifact":
        with place_holder:
            # Plain string key (was a pointless f-string): Streamlit dedupes
            # widgets by key — presumably only one temporary chart is live at a
            # time; TODO confirm against callers.
            st.plotly_chart(
                content,
                key="plotly_chart_temporary",
                use_container_width=True,
                config={'displayModeBar': False})
    elif content_type == "usage_metadata":
        with place_holder.expander("Chat Meta Info", expanded=False):
            st.json(content)
    else:
        raise ValueError("not implemented")

def decode_content_with_tag(content, tag):
    """Split *content* on ``<tag>...</tag>`` markers.

    Yields dicts of the form ``{"content": text, "tag": tag}`` for text found
    inside the markers and ``{"content": text, "tag": "normal"}`` for text
    outside them. Text is stripped of surrounding whitespace; a lone ``<``
    that does not open/close the tag is passed through as normal text.

    Args:
        content: the raw string to scan.
        tag: the tag name (without angle brackets). Escaped with
            ``re.escape`` so names containing regex metacharacters are safe.
    """
    escaped = re.escape(tag)
    # Group 1/2: a full <tag>...</tag> span and its inner text.
    # Group 3: a run of tag-free text, or a "<" that is not this tag.
    pattern = rf"(<{escaped}>(.*?)</{escaped}>)|([^<]+|<(?!/?{escaped}>))"
    for match in re.finditer(pattern, content, re.DOTALL):
        if match.group(1):  # inside the tag
            yield {"content": match.group(2).strip(), "tag": tag}
        elif match.group(3):  # outside the tag
            yield {"content": match.group(3).strip(), "tag": "normal"}

def decode_thinking_tag(content, place_holder=None):
    """Split raw model text on think/tool tags and render each segment.

    Args:
        content: the raw assistant text, possibly containing ``<think>``,
            ``<tool_start>`` and ``<tool_end>`` sections.
        place_holder: optional Streamlit container to render into; a fresh
            ``st.container()`` is created when omitted.
    """
    segments = [{"content": content, "tag": "normal"}]
    # Peel off one tag type at a time; already-tagged pieces pass through
    # untouched so nested splitting only ever re-scans "normal" text.
    for tag in ("think", "tool_start", "tool_end"):
        next_segments = []
        for segment in segments:
            if segment["tag"] == "normal":
                next_segments.extend(decode_content_with_tag(segment["content"], tag))
            else:
                next_segments.append(segment)
        segments = next_segments

    target = st.container() if place_holder is None else place_holder

    with target:
        for segment in segments:
            kind = segment["tag"]
            text = segment["content"]
            if kind == "normal":
                st.markdown(text)
            elif kind == "think":
                st.expander("Think", expanded=False, icon="🤔").markdown(text)
            elif kind == "tool_start":
                st.expander("Tool Call", expanded=False).markdown(text)
            elif kind == "tool_end":
                st.expander("Tool Result", expanded=False).markdown(text)
            else:
                # Fallback for any other tag value (none produced above).
                st.code(text)

class HistoryMessage:
    """In-memory chat transcript of user/assistant turns for Streamlit replay.

    Each entry is ``{"role": "user" | "assistant", "content": ...}``; user
    content is a plain string, assistant content is a list of typed message
    parts consumed by ``render_ai_message``.
    """

    def __init__(self):
        # Ordered transcript; oldest turn first.
        self.messages = []

    def add_user_message(self, content: str):
        """Append a user turn with the given text."""
        self.messages.append({"role": "user", "content": content})

    def add_ai_message(self, content: list[dict]):
        """Append an assistant turn made of typed content parts."""
        self.messages.append({"role": "assistant", "content": content})

    def clear(self):
        """Drop the entire transcript in place."""
        self.messages.clear()

    def _render_ai_message(self, parts: list):
        # Each part gets its own container; thinking tags are decoded
        # via the decode_thinking_tag chunk callback.
        for part in parts:
            render_ai_message(part, st.container(), decode_thinking_tag)

    def render(self):
        """Replay the whole transcript into the Streamlit page."""
        for entry in self.messages:
            role = entry["role"]
            if role == "user":
                st.chat_message("user", avatar="🧑‍💻").write(entry["content"])
            elif role == "assistant":
                with st.chat_message("assistant", avatar="🤖"):
                    self._render_ai_message(entry["content"])

async def ainvoke_with_render(graph_runner, st_messages, st_placeholder):
    """Stream graph output into *st_placeholder*, rendering as parts arrive.

    Args:
        graph_runner: object exposing ``astream(messages)`` (async iterator of
            message-part dicts carrying an ``"index"``) and a ``messages`` list.
        st_messages: either a raw user string (wrapped in a ``HumanMessage``)
            or a pre-built message list.
        st_placeholder: Streamlit container that receives the rendered output.

    Returns:
        ``graph_runner.messages`` after the stream is exhausted.
    """
    if isinstance(st_messages, str):
        st_messages = [HumanMessage(content=st_messages)]
    slot = st_placeholder.empty()
    current_index = 0
    graph_runner.messages.clear()
    async for part in graph_runner.astream(st_messages):
        # A new message index means a new logical message: start a fresh slot
        # so earlier output is not overwritten.
        if part["index"] != current_index:
            current_index = part["index"]
            slot = st_placeholder.empty()
        render_ai_message(part, slot)
    return graph_runner.messages