import asyncio
import httpx
import json
import time
from asyncio import Semaphore

from huangG.hg import HG
from pingB.pb import PB
from analysis.analysis import Analysis


# Module-level singleton crawler clients shared by every function below.
hg = HG()  # 皇冠 (HG) site client
pb = PB()  # 平博 (PB) site client


async def crawler_hg(today_lid, max_concurrent=25):
    """Concurrently fetch HG (皇冠) match details and dump them to data_hg.json.

    Args:
        today_lid: iterable of today's match ids to fetch.
        max_concurrent: maximum number of in-flight HTTP requests.
    """
    start_time = time.time()
    print(">>> 皇冠 数据爬取 start <<<")
    semaphore = Semaphore(max_concurrent)  # throttles concurrent requests
    async with httpx.AsyncClient() as client:
        tasks = [hg.get_football_today_detail(client, semaphore, lid) for lid in today_lid]
        results = await asyncio.gather(*tasks)  # run all fetches concurrently
    end_time = time.time()
    print(f">>> 皇冠 数据爬取 end ! 爬取数量: {len(today_lid)} ! 用时: {end_time-start_time} <<<")

    # Convert Game_Info objects (and their nested odds) into plain dicts so
    # they are JSON-serializable; failed fetches come back as None and are dropped.
    serializable = []
    for game_info in results:
        if game_info is None:
            continue
        game_info.game_odds = [odds.to_dict() for odds in game_info.game_odds]
        serializable.append(game_info.to_dict())
    # 写入文件
    with open("data_hg.json", "w", encoding="utf-8") as file:
        json.dump(serializable, file, indent=4, ensure_ascii=False)
    print(f"数据已成功写入 data_hg.json 文件 >>> {len(serializable)}")

def crawler_pb():
    """Crawl PB (平博) football odds (base, special and double-chance markets),
    merge the extra markets into the base games by team_id, and dump the result
    to data_pb.json.
    """
    start_time = time.time()
    print(">>> 平博 数据爬取 start <<<")
    games_base = pb.query_football_today("1", "false", "1")
    games_special = pb.query_football_other("2", "1")
    games_dc = pb.query_football_other("4", "1")
    end_time = time.time()
    # Fixed copy-paste bug: this message previously said 皇冠 in the 平博 crawler.
    print(f">>> 平博 数据爬取 end ! 爬取数量: {len(games_base)} ! 用时: {end_time-start_time} <<<")

    # Index the extra-market games by team_id so the merge is O(n + m) instead
    # of the previous O(n * m) nested scan. Insertion order (special before dc)
    # preserves the original odds-append order.
    extras_by_team = {}
    for extra in list(games_special) + list(games_dc):
        extras_by_team.setdefault(extra.team_id, []).append(extra)
    for game_base in games_base:
        for extra in extras_by_team.get(game_base.team_id, []):
            game_base.game_odds.extend(extra.game_odds)

    # Convert Game_Info objects (and nested odds) to plain dicts for JSON.
    for game_info in games_base:
        game_info.game_odds = [odds.to_dict() for odds in game_info.game_odds]
    games_base = [game.to_dict() for game in games_base]
    # 写入文件
    with open("data_pb.json", "w", encoding="utf-8") as file:
        json.dump(games_base, file, indent=4, ensure_ascii=False)
    print(f"数据已成功写入 data_pb.json 文件 >>> {len(games_base)}")


def run_crawler(flag="all"):
    """Dispatch entry point for the crawlers.

    Args:
        flag: "read" to load the previously dumped JSON files and run the
            comparison analysis; "hg", "pb" or "all" to run the corresponding
            crawler(s).

    Returns:
        The Analysis result when flag == "read", otherwise None.
    """
    if flag == 'read':
        print(" >>> start 比对测测")
        # 比对测试 — compare the two previously dumped data sets.
        with open("data_pb.json", "r", encoding="utf-8") as file:
            pb_games = json.load(file)
        with open("data_hg.json", "r", encoding="utf-8") as file:
            hg_games = json.load(file)
        # return Analysis(hg_games, pb_games).calculate()
        return Analysis(hg_games).calculate()

    if flag in ("hg", "all"):
        # 皇冠: the site requires a logged-in uid before any data is visible.
        # (Removed an unused hard-coded login-response XML sample that leaked
        # plaintext credentials — it was never referenced.)
        is_uid = hg.get_login_uid(None)
        print(f" >>> uid: {is_uid}")
        if is_uid is not None:
            # 获取皇冠今天的所有比赛id, then crawl the details asynchronously.
            today_lid = hg.query_football_today_lid()
            asyncio.run(crawler_hg(today_lid))
    if flag in ("pb", "all"):
        # 平博
        crawler_pb()


if __name__ == '__main__':
    # run_crawler('all')
    datas = run_crawler('read')
    for data in datas:
        print(f'---------------------------- {data["profit"]} ----------------------------')
        # Use double quotes for the keys: reusing the outer single quote inside
        # an f-string is a SyntaxError before Python 3.12 (PEP 701) and was
        # inconsistent with the surrounding lines.
        print(f'{data["kick_off_time"]} - {data["league"]} - {data["team"]}')
        for info in data['infos']:
            print(f'{info["type"]} / {info["ratio"]} / {info["odds"]} / {info["source"]}')
        print(f'---------------------------- end ----------------------------')