# 简单输入界面
import asyncio
import threading
from tkinter import Tk, Label, Entry, Button, messagebox, ttk

from .crawler import WebCrawler
from .ai_processor import process_with_ai


class CrawlerApp:
    """Tkinter front-end for configuring and launching a paginated web crawl.

    Collects a target URL, an inclusive page range, the custom fields the AI
    should extract, and an AI model name; the crawl itself runs on a
    background thread so the Tk event loop never blocks.
    """

    def __init__(self):
        """Build the main window and all input widgets."""
        self.root = Tk()
        self.root.title("网页爬虫工具")
        self.root.geometry("500x350")

        # Target site URL.
        Label(self.root, text="目标网站:").grid(row=0, column=0, padx=10, pady=10)
        self.url_entry = Entry(self.root, width=30)
        self.url_entry.grid(row=0, column=1, padx=10, pady=10)

        # First page to crawl (1-based).
        Label(self.root, text="起始页:").grid(row=1, column=0, padx=10, pady=10)
        self.start_page_entry = Entry(self.root, width=30)
        self.start_page_entry.grid(row=1, column=1, padx=10, pady=10)

        # Last page to crawl (inclusive; must be >= start page).
        Label(self.root, text="结束页:").grid(row=2, column=0, padx=10, pady=10)
        self.end_page_entry = Entry(self.root, width=30)
        self.end_page_entry.grid(row=2, column=1, padx=10, pady=10)

        # Free-form description of the fields the AI should extract.
        Label(self.root, text="自定义字段:").grid(row=3, column=0, padx=10, pady=10)
        self.fields_entry = Entry(self.root, width=30)
        self.fields_entry.grid(row=3, column=1, padx=10, pady=10)

        # AI model selector, populated by _load_models().
        Label(self.root, text="选择模型:").grid(row=4, column=0, padx=10, pady=10)
        self.model_combobox = ttk.Combobox(self.root, width=27)
        self.model_combobox.grid(row=4, column=1, padx=10, pady=10)
        self._load_models()

        # Start button: validates the form and kicks off the crawl.
        Button(self.root, text="开始爬取", command=self.start_crawler).grid(row=5, column=0, columnspan=2, pady=20)

    def _load_models(self):
        """Populate the model combobox and pre-select the first entry."""
        models = [
            "Qwen/Qwen2.5-7B-Instruct",
            "Qwen/Qwen2.5-Coder-32B-Instruct",
            "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
            "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
            "Qwen/QwQ-32B-Preview",
            "tencent/HunyuanVideo",
            "internlm/internlm2_5-7b-chat",
            "Qwen/Qwen2-7B-Instruct",
            "THUDM/glm-4-9b-chat",
            "THUDM/chatglm3-6b"
        ]
        self.model_combobox['values'] = models
        self.model_combobox.current(0)  # default to the first model

    def start_crawler(self):
        """Validate the form and launch the crawl on a background thread.

        Shows a warning dialog and returns early on any invalid input.

        Fix: the original called ``asyncio.run`` directly from this Tk
        button callback, which froze the whole GUI (no repaint, no input)
        until the crawl finished. The crawl now runs in a daemon thread so
        the Tk event loop stays responsive.
        """
        # Strip stray whitespace so e.g. a copy-pasted URL with a trailing
        # space still validates.
        url = self.url_entry.get().strip()
        start_page = self.start_page_entry.get().strip()
        end_page = self.end_page_entry.get().strip()
        fields = self.fields_entry.get().strip()
        model = self.model_combobox.get()

        if not url:
            messagebox.showwarning("输入错误", "请填写目标网站")
            return

        try:
            start_page = int(start_page)  # parse start page as an integer
            end_page = int(end_page)      # parse end page as an integer
            if start_page < 1 or end_page < start_page:
                raise ValueError("页数范围无效")
        except ValueError:
            messagebox.showwarning("输入错误", "起始页和结束页必须为整数，且结束页 >= 起始页")
            return

        if not fields:
            messagebox.showwarning("输入错误", "请填写自定义字段")
            return

        if not model:
            messagebox.showwarning("输入错误", "请选择模型")
            return

        # Run the async crawl off the Tk thread so the UI stays responsive.
        # daemon=True lets the process exit even if a crawl is still running.
        threading.Thread(
            target=lambda: asyncio.run(
                self._run_crawler(url, start_page, end_page, fields, model)
            ),
            daemon=True,
        ).start()

    async def _run_crawler(self, url: str, start_page: int, end_page: int, fields: str, model: str):
        """Crawl the page range and feed each page's HTML through the AI.

        Runs off the Tk thread, so results and errors are reported via
        ``print`` rather than messagebox (Tk widgets are not touched here).

        :param url: target site
        :param start_page: first page to crawl
        :param end_page: last page to crawl (inclusive)
        :param fields: custom fields for the AI to extract
        :param model: selected AI model name
        """
        crawler = WebCrawler(headless=False)
        try:
            html_contents = await crawler.run_playwright(url, start_page, end_page)
            print(f"共爬取 {start_page} 到 {end_page} 页的 HTML 内容。")

            # Pages come back in order, so page number = start_page + index.
            for i, html_content in enumerate(html_contents):
                print(f"\n正在处理第 {start_page + i} 页内容...")
                processed_data = process_with_ai(html_content, fields, model)
                print(f"第 {start_page + i} 页的爬取结果:", processed_data)
        except Exception as e:
            # Best-effort reporting: a failed crawl must not kill the thread.
            print(f"处理 {url} 时出错:", e)


if __name__ == "__main__":
    # Build the GUI and hand control to the Tk event loop.
    CrawlerApp().root.mainloop()
