import asyncio
import requests
from bs4 import BeautifulSoup
import random
from dao.BaseRepository import BaseRepository
import logging
from config.dir_config import LOG_DIR
import httpx
import os
import sys
import datetime
import time
# Module-level logger: DEBUG-level output goes both to a timestamped log
# file under LOG_DIR (one file per run, mode="w") and to the console.
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.DEBUG)
# strftime() already returns a str — the original wrapped it in a
# redundant str() call.
handler = logging.FileHandler(
    filename=os.path.join(
        LOG_DIR, datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".log"),
    mode="w",
    encoding="utf-8")
handler.setLevel(logging.DEBUG)
handler2 = logging.StreamHandler()
handler2.setLevel(logging.DEBUG)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
handler2.setFormatter(formatter)
logger.addHandler(handler)
logger.addHandler(handler2)
# Browser-mimicking request headers for www.playdds.com so the scraper is
# served the normal HTML pages.
# NOTE(review): the 'cookie' value hard-codes a PHP session id and a
# WordPress login token — these expire, after which authenticated
# requests (download redirects) will fail; refresh them before running.
headers = {
    'authority': 'www.playdds.com',
    'cache-control': 'max-age=0',
    'sec-ch-ua': '" Not;A Brand";v="99", "Google Chrome";v="97", "Chromium";v="97"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
    'upgrade-insecure-requests': '1',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'sec-fetch-site': 'same-origin',
    'sec-fetch-mode': 'navigate',
    'sec-fetch-user': '?1',
    'sec-fetch-dest': 'document',
    'referer': 'https://www.playdds.com/',
    'accept-language': 'en,en-US;q=0.9,zh-CN;q=0.8,zh;q=0.7',
    'cookie': 'PHPSESSID=s3h1gv24p7qtei1a45gh0b1ljm; Hm_lvt_b9d33317ccb3d37c20dd9c421cc0a119=1643177128; wordpress_logged_in_7262589a51af9c16b3217a80ad219ded=mail_89024245%7C1644386802%7C7XVB7oF7OiQhbow2x9o0HqT3NZXADeX8YQyxKjbUt25%7C4d89214903f2d8fc2787893b0513c7001c019ddf8251d49e50444b15b7b9b28c; Hm_lpvt_b9d33317ccb3d37c20dd9c421cc0a119=1643179611',
}


# Repository handle for the "long_game" collection where scraped games
# are persisted (see get_detail()).
baseRep = BaseRepository(collectionname="long_game")


def get_page(page: int):
    """Fetch one listing page of playdds.com and return game detail URLs.

    Args:
        page: 1-based page number; page 1 uses the bare ``/all/`` URL,
            later pages use ``/all/page/<n>/``.

    Returns:
        list[str]: the href of each ``<h2 class="entry-title">`` anchor.

    Raises:
        requests.HTTPError: if the listing page returns an error status.
    """
    if page == 1:
        url = 'https://www.playdds.com/all/'
    else:
        url = f'https://www.playdds.com/all/page/{page}/'
    response = requests.get(url, headers=headers, timeout=20)
    # Fail loudly on HTTP errors instead of silently parsing an error
    # page into an empty result list.
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html5lib')
    titles = soup.find_all("h2", class_="entry-title")
    # Each entry title wraps a single <a> whose href is the detail page.
    return [title.a.get("href") for title in titles]


def get_detail(url):
    """Scrape one game detail page and persist its pan link and code.

    Fetches *url*, extracts the game name (from <h1>), the pan extraction
    code (from the download button markup) and the redirect-resolved pan
    share link, then inserts a record into the "long_game" collection
    unless one with the same name already exists.

    Returns:
        True  -- record saved, or a record with this name already exists.
        False -- the page / download redirect could not be fetched or
                 parsed (counted as an error by the caller).

    Side effects:
        Calls sys.exit(0) when the site reports the daily download quota
        is exhausted ("今日下载次数已用完").
    """
    res = requests.get(url, headers=headers, timeout=10)
    if res.status_code != 200:
        return False
    soup = BeautifulSoup(res.text, 'html5lib')
    down_box = soup.select_one("#ripro_v2_shop_down-2 > div:nth-child(3)")
    h1 = soup.select_one("h1")
    # The 4-char extraction code sits at a fixed offset from the end of
    # the <button> markup — brittle; breaks if the theme markup changes.
    bd_code = str(soup.select_one(
        "#ripro_v2_shop_down-2 > div:nth-child(3) > button"))[-13:-9]
    game_name = h1.string
    _, num, msg = baseRep.search_no_page({"name": game_name})
    if num > 0:
        logger.debug(f"{game_name}已经存在，跳过!")
        return True
    if not (down_box and down_box.a):
        # Expected selectors did not match — the original code wrongly
        # fell into the "already exists" branch here and returned True,
        # masking layout changes; report it as a failure instead.
        return False
    down_url = down_box.a.get("href")
    # Resolve the intermediate download link manually so we can read the
    # pan share URL out of the Location header.
    redirect = requests.get(down_url, headers=headers,
                            timeout=10, allow_redirects=False)
    print(redirect.status_code)
    if redirect.status_code >= 400:
        print("获取数据失败!")
        return False
    if "今日下载次数已用完" in redirect.text:
        sys.exit(0)
    bd_url = redirect.headers.get("location")
    if bd_url is None:  # was `== None` — identity check is the idiom
        print(redirect.text)
        print(f"{down_url} {redirect.status_code} bd_url 为空获取数据失败!")
        return False
    game = {
        "name": game_name,
        "pan_link": bd_url,
        "pan_code": bd_code,
        "status": "need_save"
    }
    logger.debug(game)
    baseRep.insert_one(game)
    return True


def refresh_status():
    """Re-check stored pan links and update their status.

    TODO: not implemented yet — currently a no-op placeholder.
    """
    pass


if __name__ == "__main__":
    # Crawl listing pages 1..29; abort the whole run when more than 5
    # detail pages fail on one page, or when the daily quota is hit.
    for p in range(1, 30):
        # Was `logging.debug(...)`: that hits the unconfigured root
        # logger, so the message was silently dropped.
        logger.debug(f"开始爬取第{p}页")
        errnum = 0
        rs = get_page(p)
        for r in rs:
            try:
                result = get_detail(r)
                if not result:
                    errnum += 1
                if errnum > 5:
                    print("报错次数大于5")
                    sys.exit(0)
            except Exception:
                # The original bare `except:` also caught the SystemExit
                # raised by sys.exit(0) above and inside get_detail, so
                # the script could never actually stop. Catch only real
                # errors, log them, and move on to the next detail page.
                logger.exception(f"抓取 {r} 失败")
                continue
            # Random pause between detail requests to avoid hammering
            # the site.
            sec = random.randint(1, 3)
            logger.debug(f"等待{sec}秒")
            time.sleep(sec)
