import tkinter as tk
from tkinter import ttk
import sv_ttk
from tkcalendar import Calendar
from datetime import datetime
import logging
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from database_utils import query_database  # assumes the database_utils module exists
import threading
import sys
import os

# Configure root logging: DEBUG level and above, timestamped single-line messages.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

class ScrapyGUIApp:
    """Tkinter front-end for the bid-announcement (中标公告) Scrapy spider.

    Builds a small control panel — a start-date entry with a pop-up
    calendar, run/query buttons, a status label and a log text area —
    and runs the crawl on a background thread so the UI stays responsive.
    """

    def __init__(self, root):
        self.root = root
        self.root.title("中标公告爬虫程序")

        # Detect the OS light/dark preference and apply the matching theme.
        system_theme = self.get_system_theme()
        sv_ttk.set_theme(system_theme)

        # Frame holding the start-date entry and the action buttons.
        self.control_frame = ttk.Frame(root)
        self.control_frame.pack(pady=20)

        # Start-date entry; a left mouse click pops up a calendar picker.
        self.start_date_entry = ttk.Entry(self.control_frame, width=25)
        self.start_date_entry.pack(side=tk.LEFT, padx=5)
        self.start_date_entry.bind("<1>", self.show_calendar)

        # Pre-fill with today's date formatted as "YYYY-MM-DD 00:00:00".
        today = datetime.today().strftime("%Y-%m-%d 00:00:00")
        self.start_date_entry.insert(0, today)

        # Button that launches the spider on a worker thread.
        self.run_button = ttk.Button(self.control_frame, text="运行爬虫", command=self.start_spider_thread)
        self.run_button.pack(side=tk.LEFT, padx=5)

        # Button that queries previously scraped data.
        self.query_button = ttk.Button(self.control_frame, text="查询数据", command=self.show_query_results)
        self.query_button.pack(side=tk.LEFT, padx=5)

        # Status label shown below the controls.
        self.status_label = ttk.Label(root, text="等待运行...")
        self.status_label.pack(pady=10)

        # Text widget that displays spider log output.
        self.result_text = tk.Text(root, height=20, width=80)
        self.result_text.pack(pady=20)
        # Tag used to left-justify inserted log lines.
        self.result_text.tag_config('left', justify=tk.LEFT)

        # Was a stray debug print(sys.path); route it through logging instead.
        logging.debug("sys.path: %s", sys.path)

    def get_system_theme(self):
        """Return 'light' or 'dark' per the Windows registry, 'light' on failure.

        Reads AppsUseLightTheme from HKCU; on non-Windows platforms (no
        winreg module) or on any registry error this falls back to 'light'.
        """
        try:
            import winreg
            key = winreg.OpenKey(
                winreg.HKEY_CURRENT_USER,
                r'SOFTWARE\Microsoft\Windows\CurrentVersion\Themes\Personalize')
            try:
                value, _ = winreg.QueryValueEx(key, 'AppsUseLightTheme')
            finally:
                # Close the key even if the value query raises.
                winreg.CloseKey(key)
            return 'light' if value == 1 else 'dark'
        except Exception as e:
            logging.error(f"获取系统主题模式时出错: {e}")
            return 'light'

    def show_calendar(self, event):
        """Open a Toplevel calendar; the chosen date replaces the entry text."""
        def select_date():
            try:
                selected_date = cal.selection_get()
                formatted_date = selected_date.strftime("%Y-%m-%d 00:00:00")
                self.start_date_entry.delete(0, tk.END)
                self.start_date_entry.insert(0, formatted_date)
                top.destroy()
            except Exception as e:
                logging.error(f"选择日期时出错: {e}")

        top = tk.Toplevel(self.root)
        # Open the calendar on today's date.
        now = datetime.now()
        cal = Calendar(top, selectmode='day', year=now.year, month=now.month, day=now.day)
        cal.pack(pady=20)
        ok_button = ttk.Button(top, text="确定", command=select_date)
        ok_button.pack(pady=10)

    def start_spider_thread(self):
        """Disable the run button and start the crawl on a daemon thread."""
        self.run_button.config(state=tk.DISABLED)
        # Read the entry here, on the Tk main thread: tkinter widgets are not
        # thread-safe, so the worker must not touch them directly.
        start_date = self.start_date_entry.get()
        # daemon=True so a hung crawl cannot keep the process alive after
        # the window is closed.
        thread = threading.Thread(target=self.run_spider, args=(start_date,), daemon=True)
        thread.start()

    def run_spider(self, start_date=None):
        """Worker-thread entry point: run the Scrapy crawl and report progress.

        Args:
            start_date: "YYYY-MM-DD HH:MM:SS" lower bound forwarded to the
                spider; when None it is read from the date entry.

        All widget updates are marshalled back onto the Tk main loop with
        root.after(), because tkinter widgets must only be touched from
        the thread that created them.
        """
        def ui(call):
            # Schedule *call* to run on the Tk main loop.
            self.root.after(0, call)

        try:
            if start_date is None:
                start_date = self.start_date_entry.get()
            ui(lambda: self.status_label.config(text="爬虫开始运行..."))
            ui(lambda: self.result_text.delete("1.0", tk.END))

            # Work from the project directory: sys._MEIPASS when frozen by
            # PyInstaller, otherwise the directory containing this script.
            if getattr(sys, 'frozen', False):
                base_path = sys._MEIPASS
            else:
                base_path = os.path.dirname(os.path.abspath(__file__))
            os.chdir(base_path)

            # Scrapy project settings, with logging surfaced to the GUI.
            settings = get_project_settings()
            settings.set('LOG_ENABLED', True)
            settings.set('LOG_LEVEL', 'INFO')

            process = CrawlerProcess(settings)

            class GuiLogHandler(logging.Handler):
                """Forwards formatted log records into the result Text widget."""

                def __init__(self, tk_root, text_widget):
                    super().__init__()
                    self.tk_root = tk_root
                    self.text_widget = text_widget

                def emit(self, record):
                    msg = self.format(record)
                    # Hop to the Tk thread before touching the widget.
                    self.tk_root.after(0, self._append, msg)

                def _append(self, msg):
                    self.text_widget.insert(tk.END, msg + '\n', 'left')
                    self.text_widget.see(tk.END)

            gui_handler = GuiLogHandler(self.root, self.result_text)
            scrapy_logger = logging.getLogger('scrapy')
            scrapy_logger.addHandler(gui_handler)
            try:
                process.crawl('gzzyjson', start_date=start_date)
                # install_signal_handlers=False is required off the main
                # thread: Python only allows installing signal handlers from
                # the main thread, and the Twisted reactor tries to by default.
                process.start(install_signal_handlers=False)
            finally:
                # Don't leave the GUI handler attached to the scrapy logger.
                scrapy_logger.removeHandler(gui_handler)

            ui(lambda: self.status_label.config(text="爬虫运行成功！"))
        except Exception as e:
            logging.error(f"运行爬虫时出错: {e}")
            # Bind the message eagerly: `e` is unbound once the except block
            # ends, so a lambda capturing `e` would raise NameError later.
            error_text = f"发生错误: {e}"
            ui(lambda: self.status_label.config(text=error_text))
        finally:
            # The Twisted reactor cannot be restarted in this process, so
            # whether the crawl succeeded or failed, swap the run button
            # for an exit button.
            ui(self._swap_run_for_exit)

    def _swap_run_for_exit(self):
        """Replace the run button with an exit button (Tk main thread only)."""
        self.run_button.destroy()
        self.exit_button = ttk.Button(self.control_frame, text="退出程序", command=self.root.destroy)
        self.exit_button.pack(side=tk.LEFT, padx=5)

    def show_query_results(self):
        """Show stored crawl results via database_utils.query_database."""
        # query_database drives the status label and result text widget itself.
        query_database(self.root, self.status_label, self.result_text)

if __name__ == "__main__":
    # Build the Tk root window, attach the application, and enter the
    # event loop; blocks until the window is closed.
    main_window = tk.Tk()
    app = ScrapyGUIApp(main_window)
    main_window.mainloop()

# pyinstaller --onefile --noconsole --name "gzzy-scrapy" e:/dk_www/myspider/windows-main.py --add-data "e:/dk_www/myspider/myspider;myspider" --add-data "e:/dk_www/myspider/scrapy.cfg;."
