from dao.BaseRepository import BaseRepository
import asyncio
import datetime
import logging
import os
import sys
import time
import json
import requests
import random
from bs4 import BeautifulSoup

from config.dir_config import LOG_DIR

# Shared HTTP session so the login cookie set below persists across requests.
req_session = requests.session()

# Module logger: DEBUG-level output mirrored to a timestamped file and stderr.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler = logging.FileHandler(
    filename=os.path.join(
        LOG_DIR,
        datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".log"),
    mode="w",
    encoding="utf-8",
)
handler2 = logging.StreamHandler()
for _h in (handler, handler2):
    _h.setLevel(logging.DEBUG)
    _h.setFormatter(formatter)
    logger.addHandler(_h)
# Root of the site being scraped; paths from the pages are appended to it.
BASE_URL = "https://www.aishplay.com/"

# NOTE(review): hard-coded session id will expire; the login POST below should
# be the source of truth for the session cookie — confirm this override is needed.
cookies = {
    'PHPSESSID': 'buvslq975obd8iapib4qg7kjr2',
}

# Browser-imitating headers shared (and mutated — 'referer' is rewritten before
# each request) by every call in this module.
headers = {
    'authority': 'www.aishplay.com',
    'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
    'accept-language': 'en-US,en;q=0.9',
    'cache-control': 'no-cache',
    'content-type': 'application/x-www-form-urlencoded',
    # 'cookie': 'PHPSESSID=buvslq975obd8iapib4qg7kjr2',
    'origin': 'https://www.aishplay.com',
    'pragma': 'no-cache',
    'referer': 'https://www.aishplay.com/member/login.php',
    'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"macOS"',
    'sec-fetch-dest': 'document',
    'sec-fetch-mode': 'navigate',
    'sec-fetch-site': 'same-origin',
    'sec-fetch-user': '?1',
    'upgrade-insecure-requests': '1',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
}

# Query-string parameters for the login endpoint.
params = {
    'action': 'login',
    'from': 'index.php',
}

# SECURITY NOTE(review): credentials are committed in source. Move them to
# environment variables or an untracked config file and rotate the password.
data = {
    'M_email': 'terrydash',
    'M_pwd': '52Xuguoxu',
}

# Log in once at import time; req_session's cookie jar keeps the auth state.
response = req_session.post('https://www.aishplay.com/member/login.php', params=params, cookies=cookies, headers=headers, data=data)


# NOTE(review): dumping the whole login response HTML at DEBUG level writes
# potentially sensitive content into the log file; the membership-name probe
# below is a crude login-success check.
logger.debug(response.text)
logger.debug("terrydash" in response.text)
logger.debug(response.status_code)
# sys.exit(0)

# Persistence handle bound to the "aishplay" collection (project-local DAO).
baseRep = BaseRepository(collectionname="aishplay")
# res=req_session.post('https://www.aishplay.com/member/login.php', params=params, cookies=cookies, headers=headers, data=data)


def get_detail(url, parent_url):
    """Scrape one game detail page and follow its download-unlock flow.

    Args:
        url: Absolute URL of the game detail page.
        parent_url: Site-relative path of the listing page the link came
            from; sent as the HTTP referer.

    Returns:
        Intended contract (expected by main()) is a (bool, str) pair, but the
        function currently ends in an unconditional sys.exit(0) — see FIXME
        at the bottom.
    """
    headers['referer'] = f'{BASE_URL}{parent_url}'

    response = req_session.get(url, timeout=30)
    base_soup = BeautifulSoup(response.text, 'lxml')

    # The "pay-box" anchor points at the download/unlock page.
    # (Previously this element was selected twice and the href printed to
    # stdout; consolidated into a single select + debug log.)
    down_link = base_soup.select_one("div.pay-box a")
    down_page_link = down_link.get("href")
    logger.debug(down_page_link)

    # Metadata scraped from the detail page.
    game_name = base_soup.select_one("h1").get_text().strip()
    logger.debug(f"game_name : {game_name}")
    update_time = base_soup.select_one("time").get_text().strip()
    logger.debug(f"update_time : {update_time}")
    description = base_soup.select_one("div.container p").get_text().strip()
    logger.debug(f"description : {description}")
    i_tags = base_soup.select("span.meta-category a")
    description_part2 = i_tags[0].get_text().strip()
    description_part3 = i_tags[1].get_text().strip()
    spans = base_soup.select("div.entry-content.u-text-format.u-clearfix span")
    description_part4 = spans[-1].get_text().strip()
    logger.debug(f"description_part2 : {description_part2}")
    logger.debug(f"description_part3 : {description_part3}")
    logger.debug(f"description_part4 : {description_part4}")

    # The numeric game id is the trailing query value of the download link.
    game_id = down_page_link.split("id=")[-1]
    logger.debug(game_id)

    # Second hop: the download page embeds a <script> whose first quoted
    # string is the "unlogin" unlock URL carrying a one-time genkey.
    headers['referer'] = url
    res = req_session.get(f"{BASE_URL}{down_page_link}", timeout=30)
    soup = BeautifulSoup(res.text, 'lxml')
    logger.debug(res.status_code)
    logger.debug(soup)
    script = soup.select_one("script")
    script_text = script.get_text().strip()
    next_url = script_text.split("'")[1]
    genkey = next_url.removeprefix("member/unlogin.php?genkey=").removesuffix("&address=0")
    logger.debug(f"{genkey}")
    logger.debug(f"{next_url}")

    # Third hop: the unlock page's <textarea> holds the pan-link payload.
    headers['referer'] = f"{BASE_URL}{down_page_link}"
    res = req_session.get(f"{BASE_URL}{next_url}", timeout=30)
    logger.debug(res.status_code)
    logger.debug(res.text)
    soup = BeautifulSoup(res.text, 'lxml')
    textarea = soup.select_one("textarea")
    logger.debug(textarea.get_text().strip())

    # FIXME(review): debug scaffolding — this unconditional exit stops the
    # whole process after the first detail page, so the (bool, str) contract
    # main() expects is never honoured. The legacy baidu-pan branch that used
    # to follow was unreachable dead code (it also referenced an undefined
    # `name` and indexed the `update_time` string as if it were a tag list)
    # and has been removed. Once the textarea payload above is confirmed,
    # persist it via baseRep and replace this exit with
    # `return True, "OK"` / `return False, <reason>`.
    sys.exit(0)


def main():
    """Crawl listing pages and scrape every game detail link found.

    Iterates pages start_page..end_page-1, collects the detail links on each
    listing page, and hands them to get_detail(). Exits the process early
    when get_detail reports the daily download quota was reached.
    """
    start_page = 1
    end_page = 5  # range() is half-open: pages 1..4 are crawled
    for page in range(start_page, end_page):
        logger.info(f"开始爬取第{page}页")
        list_url = f'{BASE_URL}/index.php?type=product&id=0&orderby=time&page={page}'
        headers['referer'] = list_url
        response = req_session.get(list_url)
        # BUGFIX: was "gbk2312", which is not a registered codec, so requests
        # silently fell back to a default decode; the site serves GB2312.
        response.encoding = "gb2312"

        soup = BeautifulSoup(response.text, 'lxml')
        links = soup.select("div.placeholder a")
        logger.info(f"当前页共有{len(links)}项")

        for link in links:
            current_link = link.get("href")
            try:
                to_url = f"{BASE_URL}/index.php{current_link}"
                logger.debug(f"开始爬取{to_url}")
                result, msg = get_detail(
                    to_url,
                    f"/index.php?type=product&id=0&orderby=time&page={page}")
                # BUGFIX: comparison was against "ok" while get_detail returns
                # "OK"; normalise case so a benign (False, "OK") result does
                # not terminate the whole crawl.
                if not result and str(msg).lower() != "ok":
                    logger.info("今日下载完成，退出")
                    time.sleep(10)
                    sys.exit(0)
                time.sleep(30)  # throttle between detail pages
            except Exception:
                # Log the full traceback (previously a bare print(e)) and
                # keep going with the next link.
                logger.exception("crawl failed: %s", current_link)
                time.sleep(30)
                continue


if __name__ == "__main__":
    # Script entry point: runs the crawl over the configured page range.
    main()
    