from dao.BaseRepository import BaseRepository
import asyncio
import datetime
import logging
import os
import sys
import time
import json
import requests
import random
from bs4 import BeautifulSoup

from config.dir_config import LOG_DIR

# ---------------------------------------------------------------------------
# Logging: each run writes a timestamped log file under LOG_DIR and also
# echoes to the console; both sinks log at DEBUG level.
# ---------------------------------------------------------------------------
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

_log_file = os.path.join(
    LOG_DIR, datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".log")
_file_handler = logging.FileHandler(filename=_log_file, mode="w", encoding="utf-8")
_stream_handler = logging.StreamHandler()
_formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
for _handler in (_file_handler, _stream_handler):
    _handler.setLevel(logging.DEBUG)
    _handler.setFormatter(_formatter)
    logger.addHandler(_handler)

# Root of the site being scraped (note the trailing slash).
BASE_URL = "https://www.xdgame.com/"

# Session cookies captured from a logged-in browser session.
cookies = {
    'Hm_lvt_1905089d52b6f08f01b437535400116c': '1666883097',
    'DedeUserID': '88281',
    'DedeUserID__ckMd5': 'df11f66792ab3b59',
    'night': '0',
    'DedeLoginTime': '1669207288',
    'DedeLoginTime__ckMd5': '188207a8b5a7e9f7',
}

# Browser-like default headers used for listing-page requests.
headers = {
    'authority': 'www.xdgame.com',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
    # The cookie header is supplied via the `cookies=` argument instead;
    # requests serializes it itself.
    'referer': f'{BASE_URL}',
    'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
    'sec-fetch-dest': 'document',
    'sec-fetch-mode': 'navigate',
    'sec-fetch-site': 'same-origin',
    'sec-fetch-user': '?1',
    'upgrade-insecure-requests': '1',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.56',
}

# Repository backing the "xdgame" collection; shared by all functions below.
baseRep = BaseRepository(collectionname="xdgame")

def refresh():
    """Flag every stored record as 'need_save' so downstream export re-processes it."""
    records, _, _ = baseRep.get_all_no_page()
    for record in records:
        record["status"] = "need_save"
        baseRep.update_by_id(str(record.get("_id")), record)



def get_detail(url, parent_url):
    """Scrape one game detail page and insert its Baidu-pan link into the DB.

    Skips the page when the game name already exists in the collection, when
    no Baidu-pan download button is present, or when the download endpoint
    does not redirect to a pan URL.

    Args:
        url: absolute URL of the detail page.
        parent_url: site-relative path of the listing page we came from;
            used to build the Referer header.
    """
    # Per-request headers: same browser fingerprint as the module-level
    # defaults, but with a Referer pointing at the listing page we came from.
    headers = {
        'authority': 'www.xdgame.com',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cache-control': 'max-age=0',
        # The cookie header is supplied via the `cookies=` argument instead.
        'referer': f'{BASE_URL}{parent_url}',
        'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'document',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-site': 'same-origin',
        'sec-fetch-user': '?1',
        'upgrade-insecure-requests': '1',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.56',
    }
    # timeout prevents the crawler from hanging forever on a stuck connection
    response = requests.get(url, cookies=cookies, headers=headers, timeout=30)
    soup = BeautifulSoup(response.text, 'html5lib')

    title_tag = soup.select_one("h1")
    if title_tag is None:
        # original code would raise AttributeError here; skip the page instead
        logger.warning("no <h1> title found, skipping page: %s", url)
        return
    name = title_tag.get_text().strip()

    _, num, msg = baseRep.search_no_page({"name": name})
    if num > 0:
        logger.info(f"{name}已经存在,不用插入数据库")
        return

    # Only the Baidu-pan download button is of interest.
    baidupan_links = [l for l in soup.select("a.downbtn") if "百度网盘" in l.text]
    if not baidupan_links:
        return

    baidupan_url = baidupan_links[0].get("data-url")
    # The download endpoint 302-redirects to the real pan.baidu.com URL;
    # allow_redirects=False lets us read it from the Location header.
    r = requests.get(f"{BASE_URL}{baidupan_url}",
                     cookies=cookies, headers=headers,
                     allow_redirects=False, timeout=30)
    if r.status_code != 302:
        return
    pan_url = r.headers['location']

    # The description is the first <p> whose text contains a '|' separator.
    ps = soup.select("p")
    logger.debug("detail page has %d <p> tags", len(ps))
    description = ""
    for p in ps:
        if "|" in p.get_text():
            description = p.get_text()
            break

    # Extraction code is embedded in the URL as a "pwd=" query parameter.
    pan_code = ""
    if "pwd=" in pan_url:
        pan_code = pan_url.split("pwd=")[-1]

    game = {
        "name": name,
        "description": description,
        "pan_link": pan_url,
        "pan_code": pan_code,
        "status": "need_save"
    }
    logger.info("当前游戏为:" + json.dumps(game, ensure_ascii=False))
    logger.info("插入数据库")
    baseRep.insert_one(game)
    # Be polite to the server between inserts.
    time.sleep(5)



def main():
    """Crawl listing pages 1..99 and scrape every game detail page they link to."""
    start_page = 1
    end_page = 100  # range() upper bound is exclusive: pages 1..99
    for page in range(start_page, end_page):
        logger.info(f"开始爬取第{page}页")
        # BASE_URL already ends with '/', so join without inserting another
        # slash (the original produced 'www.xdgame.com//list/...').
        response = requests.get(f'{BASE_URL}list/1/list_{page}.html',
                                cookies=cookies, headers=headers, timeout=30)
        # The site is GBK-encoded. The original value "gbk2312" is not a
        # real Python codec, so requests silently fell back to its guessed
        # encoding; "gbk" (superset of gb2312) is the intended codec.
        response.encoding = "gbk"
        soup = BeautifulSoup(response.text, 'html5lib')
        links = soup.select("a.tit")
        logger.info(f"当前页共有{len(links)}项")
        for link in links:
            # hrefs are site-relative and may start with '/'; strip it so
            # joining with the slash-terminated BASE_URL yields a clean URL
            current_link = link.get("href").lstrip("/")
            get_detail(f"{BASE_URL}{current_link}", f"/list/1/list_{page}.html")
            


if __name__ == "__main__":
    # NOTE(review): the entry point runs refresh() (re-flags all stored
    # records as 'need_save') rather than main() (the crawler) — confirm
    # this is the intended run mode and not a leftover from a one-off task.
    refresh()
