import sys
import multiprocessing
from PyQt6.QtWidgets import QApplication

from main_ui import MainWindow
from main_crawler import run_crawler

import requests
import time


# Crawler worker (runs in its own process).
def spider(queue, stop_event):
    """Crawl match data in a loop until *stop_event* is set.

    Each round calls run_crawler(), de-duplicates the results by
    (kick_off_time, league, team), sorts them by profit percentage in
    descending order, and pushes the list onto *queue* for the UI
    process to consume. Sleeps 10 seconds between rounds, including
    after an error, so a failing crawl cannot busy-spin.

    Args:
        queue: multiprocessing.Queue read by the UI process.
        stop_event: multiprocessing.Event signalling shutdown.
    """
    while not stop_event.is_set():  # exit promptly once the UI asks us to stop
        try:
            games = run_crawler()
            # De-duplicate, keeping the first occurrence of each key.
            unique_games = {}
            for game in games:
                unique_games.setdefault(get_unique_key(game), game)
            # Highest profit first for the UI list.
            sorted_games = sorted(unique_games.values(), key=extract_profit, reverse=True)
            queue.put(sorted_games)
            time.sleep(10)  # poll every 10 seconds
        except Exception as e:
            # Broad catch at the process boundary: log and keep the crawler alive.
            print("爬虫出错:", e)
            time.sleep(10)

def get_unique_key(game):
    """Return the de-duplication key for *game*.

    Two records are treated as the same match when their kick-off
    time, league and team all agree.
    """
    return tuple(game[field] for field in ('kick_off_time', 'league', 'team'))

def extract_profit(game):
    """Return the profit percentage as a float, used as a sort key.

    Drops the '%' sign from the profit string before converting,
    e.g. '12.5%' -> 12.5.
    """
    return float(game['profit'].strip('%'))

def main():
    """Start the crawler process and the PyQt6 UI; shut both down cleanly."""
    # Inter-process channel and shutdown flag shared with the crawler.
    queue = multiprocessing.Queue()
    stop_event = multiprocessing.Event()

    # Run the crawler in a separate process so it can never block the UI.
    spider_process = multiprocessing.Process(target=spider, args=(queue, stop_event))
    spider_process.start()

    # Build and show the PyQt6 application.
    app = QApplication(sys.argv)
    window = MainWindow(queue, stop_event)
    window.setWindowOpacity(0.95)
    window.show()

    app.exec()  # blocks until the last window closes

    # Graceful shutdown: signal the crawler via stop_event and give it one
    # full poll cycle (10s sleep) to exit on its own; terminate() is only a
    # last resort, since killing mid-put can corrupt the queue.
    stop_event.set()
    spider_process.join(timeout=15)
    if spider_process.is_alive():
        spider_process.terminate()
        spider_process.join()

if __name__ == '__main__':
    # Required guard: multiprocessing re-imports this module in the child.
    main()