from dao.BaseRepository import BaseRepository
import asyncio
import datetime
import logging
import os
import sys
import time
import json
import requests
import random
from bs4 import BeautifulSoup

from config.dir_config import LOG_DIR

# Shared HTTP session so TCP connections (and any server-set cookies) are
# reused across detail-page requests.
req_session = requests.session()

# Module logger: DEBUG-level output to both a timestamped log file under
# LOG_DIR and the console, with a common format.
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.DEBUG)
handler = logging.FileHandler(
    filename=os.path.join(LOG_DIR, str(datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")) + ".log"), mode="w",
    encoding="utf-8")
handler.setLevel(logging.DEBUG)
handler2 = logging.StreamHandler()
handler2.setLevel(logging.DEBUG)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
handler2.setFormatter(formatter)
logger.addHandler(handler)
logger.addHandler(handler2)

# Scrape target.  NOTE: ends with a slash, and callers below concatenate
# paths that also begin with "/", producing double slashes in the final URLs
# (the server apparently tolerates this).
BASE_URL = "https://www.xdgame.com/"

# NOTE(review): hard-coded login/session cookies copied from a browser.
# These expire (especially PHPSESSID / DedeLoginTime*); refresh them when
# requests start returning the logged-out page.  Consider moving them to
# config instead of committing them to source.
cookies = {
    '__gads': 'ID=e2850afc71ccc05d-222fc52c2cdd0035:T=1681185491:RT=1681185491:S=ALNI_MY2iNfW6Z4AtdzoNP7cqKpNyYzbTw',
    'night': '0',
    'Hm_lvt_1905089d52b6f08f01b437535400116c': '1683034448,1683037673,1683040417,1683079259',
    '__gpi': 'UID=00000bdd82a15ad5:T=1681185491:RT=1683079259:S=ALNI_MYO9vAWfS5M5IORt9RSe8ItkZPMfw',
    'PHPSESSID': 'fa5n41rn46cl5712jn6n0ujog0',
    'DedeUserID': '88281',
    'DedeUserID__ckMd5': 'df11f66792ab3b59',
    'DedeLoginTime': '1683079423',
    'DedeLoginTime__ckMd5': '4441e4e68596034a',
    'Hm_lpvt_1905089d52b6f08f01b437535400116c': '1683079440',
}

# Browser-like request headers to avoid trivial bot detection; 'referer' is
# overwritten per request in get_detail()/main().
headers = {
    'authority': 'www.xdgame.com',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
    'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
    'cache-control': 'max-age=0',
    # 'cookie': '__gads=ID=e2850afc71ccc05d-222fc52c2cdd0035:T=1681185491:RT=1681185491:S=ALNI_MY2iNfW6Z4AtdzoNP7cqKpNyYzbTw; night=0; Hm_lvt_1905089d52b6f08f01b437535400116c=1683034448,1683037673,1683040417,1683079259; __gpi=UID=00000bdd82a15ad5:T=1681185491:RT=1683079259:S=ALNI_MYO9vAWfS5M5IORt9RSe8ItkZPMfw; PHPSESSID=fa5n41rn46cl5712jn6n0ujog0; DedeUserID=88281; DedeUserID__ckMd5=df11f66792ab3b59; DedeLoginTime=1683079423; DedeLoginTime__ckMd5=4441e4e68596034a; Hm_lpvt_1905089d52b6f08f01b437535400116c=1683079440',
    'referer': 'https://www.xdgame.com/list/1/',
    'sec-ch-ua': '"Chromium";v="112", "Microsoft Edge";v="112", "Not:A-Brand";v="99"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
    'sec-fetch-dest': 'document',
    'sec-fetch-mode': 'navigate',
    'sec-fetch-site': 'same-origin',
    'sec-fetch-user': '?1',
    'upgrade-insecure-requests': '1',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36 Edg/112.0.1722.64',
}

# Data-access object for the "xdgame" collection (see dao.BaseRepository);
# presumably MongoDB-backed given the "_id" usage below — confirm in the DAO.
baseRep = BaseRepository(collectionname="xdgame")


def get_detail(url, parent_url):
    """Fetch a game detail page and refresh the stored record if it exists.

    Looks the game up by its <h1> title; when a matching record is already in
    the "xdgame" collection, updates its ``code`` (derived from the URL's
    trailing "<code>.html" segment) and ``url`` fields.

    Args:
        url: Absolute URL of the detail page (e.g. ".../12345.html").
        parent_url: Site-relative path of the listing page we came from,
            used to build the Referer header.
    """
    # The site is served behind a same-origin Referer check; pretend we
    # navigated here from the listing page.
    headers['referer'] = f'{BASE_URL}{parent_url}'

    response = req_session.get(url, cookies=cookies, headers=headers, timeout=30)
    soup = BeautifulSoup(response.text, 'lxml')

    title = soup.select_one("h1")
    if title is None:
        # Likely an error/captcha/logged-out page — nothing usable to update.
        logger.warning("no <h1> found at %s, skipping", url)
        return
    name = title.get_text().strip()

    results, num, _ = baseRep.search_no_page({"name": name})
    if num > 0:
        logger.info(f"{name}已经存在,更新code")
        record = results[0]
        # Detail URLs end in "<code>.html"; keep the lowercase code only.
        code = url.rsplit("/", 1)[-1].lower().replace(".html", "")
        logger.debug("code=%s", code)
        record["code"] = code
        record["url"] = url
        baseRep.update_by_id(str(record.get("_id")), record)
        time.sleep(3)  # be polite: pause after each DB-updating fetch


def main(start_page=1, end_page=20):
    """Crawl listing pages [start_page, end_page) and process every game link.

    Args:
        start_page: First listing page to fetch (inclusive). Defaults to 1.
        end_page: End of the page range, exclusive (preserves the original
            ``range(1, 20)`` behavior). Defaults to 20.
    """
    for page in range(start_page, end_page):
        logger.info(f"开始爬取第{page}页")
        list_path = f'/list/1/list_{page}.html'
        headers['referer'] = f'{BASE_URL}{list_path}'
        # Use the shared session (consistent with get_detail) and a timeout
        # so a stalled page cannot hang the crawl.
        response = req_session.get(f'{BASE_URL}{list_path}',
                                   cookies=cookies, headers=headers, timeout=30)
        # BUG FIX: the original set "gbk2312", which is not a real codec name;
        # requests silently swallows the LookupError and falls back to charset
        # detection.  The site serves GBK-family pages, so decode explicitly
        # as gbk (a superset of gb2312).
        response.encoding = "gbk"
        soup = BeautifulSoup(response.text, 'lxml')
        links = soup.select("a.tit")
        logger.info(f"当前页共有{len(links)}项")

        for anchor in links:
            current_link = anchor.get("href")
            if not current_link:
                continue  # anchor without an href — nothing to fetch
            logger.debug(current_link)
            try:
                get_detail(f"{BASE_URL}{current_link}", list_path)
                time.sleep(10)  # throttle: one detail page per 10 seconds
            except Exception:
                # Best-effort crawl: log the failure with traceback, back off
                # longer, then continue with the next link.
                logger.exception("failed to process %s", current_link)
                time.sleep(30)

# Run the crawler only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
