import gradio as gr
from app.dependencies import get_llm_model

class ChatInterface:
    """Gradio Blocks chat UI backed by the project's LLM model.

    The model is loaded eagerly in ``__init__`` so a broken model
    configuration fails at startup rather than on the first message.
    """

    def __init__(self):
        """Load the LLM model via the app's dependency provider.

        Raises:
            Exception: re-raised after logging, so startup aborts loudly
                instead of serving a UI with no model behind it.
        """
        try:
            self.model = get_llm_model()
        except Exception as e:
            print(f"模型加载失败: {e}")
            raise

    def create_interface(self) -> gr.Blocks:
        """Build and return the Gradio chat interface.

        Returns:
            gr.Blocks: the assembled UI. (The previous annotation said
            ``gr.Interface``, but ``gr.Blocks`` is what is constructed
            and returned.)
        """
        with gr.Blocks(title="TinyLlama Chat", theme=gr.themes.Soft()) as interface:
            gr.Markdown("""
                # TinyLlama Chat
                与 AI 助手进行对话
            """)

            with gr.Row():
                with gr.Column(scale=4):
                    chatbot = gr.Chatbot(
                        height=600,
                        show_label=False,
                        avatar_images=["🧑", "🤖"],
                        bubble_full_width=False,
                        render_markdown=True,
                    )

                    with gr.Row():
                        msg = gr.Textbox(
                            show_label=False,
                            placeholder="请输入您的问题，按 Enter 发送...",
                            lines=2,
                            container=False,
                        )
                        with gr.Column(scale=1, min_width=100):
                            submit = gr.Button("发送", variant="primary")
                            clear = gr.Button("清除")

            def process_bot_response(response: str) -> str:
                """Strip chat-template role markers and format the reply.

                The original marker list also contained two empty strings;
                ``str.replace("", "")`` is a no-op, so they were dead
                entries and have been removed.
                """
                markers = ("<|assistant|>", "<|user|>")
                for marker in markers:
                    response = response.replace(marker, "")

                # Drop a leading "role:" prefix if the model echoed one.
                # NOTE(review): this splits on the FIRST colon, so any colon
                # inside the actual answer discards everything before it —
                # confirm this matches the model's real output format.
                if ":" in response:
                    response = response.split(":", 1)[1]

                return response.strip()

            def user(user_message, history):
                """Append the user's message as a new turn and clear the box."""
                if not user_message.strip():  # ignore empty/whitespace-only input
                    return "", history
                # New turn: [user text, None] — the None slot is filled by bot().
                return "", history + [[user_message, None]]

            def bot(history):
                """Generate the assistant reply for the most recent user turn."""
                try:
                    if not history:
                        return history

                    user_message = history[-1][0]
                    bot_message = self.model.generate(user_message)
                    bot_message = process_bot_response(bot_message)

                    # Fill in the pending assistant slot for the last turn.
                    history[-1][1] = bot_message
                    return history
                except Exception as e:
                    # history is non-empty here: the empty case returned above,
                    # before anything that can raise.
                    print(f"生成回复时出错: {e}")
                    history[-1][1] = "抱歉，生成回复时出错了。"
                    return history

            # Enter key: record the user turn first (unqueued, so the textbox
            # clears immediately), then run the model reply.
            msg.submit(
                user,
                inputs=[msg, chatbot],
                outputs=[msg, chatbot],
                queue=False
            ).then(
                bot,
                inputs=chatbot,
                outputs=chatbot
            )

            # Send button mirrors the Enter-key behavior exactly.
            submit.click(
                user,
                inputs=[msg, chatbot],
                outputs=[msg, chatbot],
                queue=False
            ).then(
                bot,
                inputs=chatbot,
                outputs=chatbot
            )

            # Clear button resets both the textbox and the chat history.
            clear.click(
                lambda: (None, None),
                None,
                [msg, chatbot],
                queue=False
            )

        return interface