from dao.BaseRepository import BaseRepository
import asyncio
import datetime
import logging
import os
import sys
import time
import json
import requests
import random
from bs4 import BeautifulSoup

from config.dir_config import LOG_DIR

# Module logger: emits DEBUG-and-above records both to a timestamped file in
# LOG_DIR and to the console, with a shared timestamp/name/level format.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# One log file per run, named after the start time, e.g. 2022_11_18_12_00_00.log
handler = logging.FileHandler(
    filename=os.path.join(
        LOG_DIR, datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".log"),
    mode="w",
    encoding="utf-8")
handler2 = logging.StreamHandler()

for _h in (handler, handler2):
    _h.setLevel(logging.DEBUG)
    _h.setFormatter(formatter)
    logger.addHandler(_h)

# Session cookies for bbs.3dmgame.com, captured from a logged-in browser.
# SECURITY NOTE(review): these look like live authentication tokens hard-coded
# in source; they will expire and should be moved to config/env — confirm.
cookies = {
    'Hm_lvt_41e10dae8e5d7e8cc907484abce82fb9': '1660900409,1663132540',
    'uchome_2132_saltkey': 'N1H6gGvd',
    'uchome_2132_lastvisit': '1668752445',
    'uchome_2132_sendmail': '1',
    'uchome_2132_con_request_uri': 'https%3A%2F%2Fbbs.3dmgame.com%2Fconnect.php%3Fmod%3Dlogin%26op%3Dcallback%26referer%3Dforum.php',
    'uchome_2132_client_created': '1668756135',
    'uchome_2132_client_token': '9A9C669A39C730D9C0EF0EFFC822A8A8',
    'uchome_2132_ulastactivity': '1668756135%7C0',
    'uchome_2132_auth': '9c49MZO62g5arky58NV9Ns1jI7qlGIS%2BNvw86SUpNrlWaBl%2FKZH5Hqqh0kDDxeWiU3GoZqaMvdwJD8XydOFlUf8khGei',
    'uchome_2132_connect_login': '1',
    'uchome_2132_connect_is_bind': '1',
    'uchome_2132_connect_uin': '9A9C669A39C730D9C0EF0EFFC822A8A8',
    'uchome_2132_stats_qc_login': '3',
    'uchome_2132_security_cookiereport': '3b2fpvXeTXYQyC5wQRnf46lqGztxDj%2FjSDow%2B%2FpmJkSzMm5owSt5',
    'uchome_2132_checkpm': '1',
    'uchome_2132_noticeTitle': '1',
    'uchome_2132_nofavfid': '1',
    'uchome_2132_connect_last_report_time': '2022-11-18',
    'uchome_2132_lastact': '1668756141%09connect.php%09check',
}
# PAGE_NUM = 1

# Repository handle for the "3dm_url" collection (project DAO; backing store
# not visible from this file). Used for dedup lookups and inserts below.
baseRep = BaseRepository(collectionname="3dm_url")


async def crapy_3DMGames(PAGE_NUM):
    """Scrape one page of the 3DM download listing and schedule detail scrapes.

    Fetches ``https://dl.3dmgame.com/all_all_<PAGE_NUM>_time/``, walks every
    ``.item`` entry that actually has a resource, and for each game name not
    yet present in the ``3dm_url`` collection schedules :func:`getDetail` as
    an asyncio task.  Exits the whole process once 5 already-known games have
    been seen (the listing is time-ordered, so repeated hits presumably mean
    we have caught up with previously scraped data — confirm).

    :param PAGE_NUM: 1-based page index of the download listing.
    """
    duplicate_count = 0
    start_time = time.time()
    download_home_url = "https://dl.3dmgame.com/all_all_{}_time/".format(PAGE_NUM)
    # NOTE(review): requests is synchronous and blocks the event loop; a real
    # async client (e.g. aiohttp) would be needed for true concurrency here.
    r = requests.get(download_home_url, timeout=15)
    soup = BeautifulSoup(r.content, 'html5lib')
    tasks = []
    for item in soup.select(".item"):
        # Hard stop after 5 known games (original compared an always-int
        # counter against None first — dead check, removed).
        if duplicate_count >= 5:
            logger.info("chongfudayu10")
            sys.exit()
        link = item.select_one(".a_click")
        # Skip entries without a download link or marked "暂无资源" (no resource).
        if link is None or "暂无资源" in str(link):
            continue
        title = item.select_one(".text >.bt > a")
        if title is None:
            continue
        game_name = title.string
        _, num, msg = baseRep.search_no_page({"name": game_name})
        if num == 0:
            logger.info("{}不存在,开始爬取!".format(game_name))
            tasks.append(asyncio.create_task(getDetail(link.get("href"))))
        else:
            # Fixed: message said "已不存在" (does not exist) on the branch where
            # the game DOES exist; also a duplicate is not an error.
            logger.info("{}已存在,停止爬取!".format(game_name))
            duplicate_count += 1
    # Wait for every scheduled detail scrape (was: [await t for t in ...]).
    await asyncio.gather(*tasks)
    logger.info("第{0}页,耗时{1}".format(PAGE_NUM, str(time.time() - start_time)))


def _css_text(soup, selector, default=None):
    """Return ``.string`` of the first element matching *selector*, or *default*."""
    node = soup.select_one(selector)
    return node.string if node is not None else default


def _css_attr(soup, selector, attr, default=None):
    """Return attribute *attr* of the first element matching *selector*, or *default*."""
    node = soup.select_one(selector)
    return node.get(attr) if node is not None else default


async def getDetail(url):
    """Scrape one game detail page and store its metadata in the repository.

    Extracts type/producer/date/language/introduction fields and the cover
    image from the detail page, then follows the download redirect chain
    (detail -> so.hyds360.com -> final page) to collect the Baidu-pan link,
    extraction code, torrent link, name and size.  Inserts the assembled
    record into ``baseRep`` unless a game with the same name already exists.

    :param url: absolute URL of the game detail page.
    """
    logger.info("爬取链接:" + str(url))
    r = requests.get(url, timeout=15)
    # Fixed: "gbk2312" is not a valid codec name — requests silently fell
    # back to charset detection. The site serves GB2312-encoded pages.
    r.encoding = "gb2312"
    soup = BeautifulSoup(r.text, 'html5lib')

    baiduyun_code = _css_text(soup, ".nxzbdcode")
    if baiduyun_code is not None:
        baiduyun_code = baiduyun_code.replace("(提取码：", "").replace(")", "")

    # The info list and introduction paragraphs differ only in child index.
    info_sel = ('body > div.content.clear.game > div.gameinfo > ul > '
                'li:nth-child({}) > span')
    intro_sel = ('body > div.content.clear.game > div.Content_L > div.GmL_1 > '
                 'p:nth-child({})')
    game_type = _css_text(soup, info_sel.format(1), "未知类型")
    game_producer = _css_text(soup, info_sel.format(2), "未知厂商")
    game_publish_date = _css_text(soup, info_sel.format(3), "未知日期")
    # Fixed: default was "未知厂商" (unknown producer) — copy-paste error.
    game_lang = _css_text(soup, info_sel.format(8), "未知语言")
    game_introduce = _css_text(soup, intro_sel.format(1), "未知内容")
    game_play_introduce = _css_text(soup, intro_sel.format(4), "未知玩法")
    game_content_introduce = _css_text(soup, intro_sel.format(6), "未知游戏内容")
    game_img_url = _css_attr(
        soup,
        'body > div.content.clear.game > div.Content_L > div.GmL_4 > '
        'div.large_box > ul > li:nth-child(2) > img',
        "src", "未知图片URL")

    downURL = _css_attr(soup, "#xzhzdownurl", "value")
    if downURL is None:
        return
    downURL = str(downURL)
    if not downURL.startswith('https://so.hyds360.com'):
        downURL = 'https://so.hyds360.com' + downURL

    # First hop: the hyds360 interstitial page holds the real download link.
    html = requests.get(downURL, timeout=15).text
    soup = BeautifulSoup(html, 'html5lib')
    downURL = _css_attr(soup, "#gowpbtn", "href")
    if downURL is None:
        return

    # Second hop: the final page with pan/torrent links. (timeout added —
    # this request previously had none and could hang forever.)
    html = requests.get(downURL, timeout=15).text
    soup = BeautifulSoup(html, 'html5lib')
    content = "body > div.yxhz_n1_container > div.n1_content"
    baiduyun_url = _css_attr(soup, content + " > a:nth-child(5)", "href")
    torrent_url = _css_attr(soup, content + " > a:nth-child(6)", "href")
    game_name = _css_text(soup, content + " > ul > li.gameID_cn")
    game_size = _css_text(soup, content + " > ul > li.gameSize > span")

    game = {
        "name": game_name,
        "size": game_size,
        "lang": game_lang,
        "type": game_type,
        "producer": game_producer,
        "publish_date": game_publish_date,
        "introduce": game_introduce,
        "play_introduce": game_play_introduce,
        "content_introduce": game_content_introduce,
        "img_url": game_img_url,
        "baiduyun_url": baiduyun_url,
        "baiduyun_code": baiduyun_code,
        "torrent_url": torrent_url
    }
    logger.info(game)
    _, num, msg = baseRep.search_no_page({"name": game_name})
    if num == 0:
        baseRep.insert_one(game)


# browser = webdriver.Chrome()  # 声明浏览器
# browser.get(url)

# Scrape the 3dmgame forum ("game0day" board) listing pages for download links.
def get_page(page: int):
    """Scrape one page of the bbs.3dmgame.com "game0day" forum listing.

    Walks every thread header (``<th>``) on the page; for each thread link
    whose title is not already in the repository, scrapes the thread via
    :func:`page_detail`, sleeping 1-3 s between requests to be polite.

    :param page: 1-based forum page index.
    :return: deduplicated list of thread links found on the page
        (previously computed but discarded).
    """
    headers = {
        'authority': 'bbs.3dmgame.com',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cache-control': 'max-age=0',
        # Cookies are passed via the module-level `cookies` dict instead.
        'referer': 'https://bbs.3dmgame.com/connect.php?receive=yes&mod=login&op=callback&referer=forum.php&code=0BE178BE0F19EC49053EAC5E30D0ED72&state=a76aeca2bfbc1f706aeab365f2e50b1d',
        'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'document',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-site': 'same-origin',
        'sec-fetch-user': '?1',
        'upgrade-insecure-requests': '1',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.35'
    }
    url = f'https://bbs.3dmgame.com/forum-game0day-{page}.html'
    response = requests.get(url, headers=headers, cookies=cookies, timeout=15)
    # Fixed: "gbk2312" is not a valid codec name; the forum serves GB2312.
    response.encoding = "gb2312"

    soup = BeautifulSoup(response.text, 'html5lib')
    links = []
    for th in soup.find_all("th"):
        alinks = th.find_all("a")
        # The thread-title anchor is the third <a> inside the header cell.
        if len(alinks) < 3:
            continue
        link = str(alinks[2].get("href"))
        if not link.endswith(".html"):
            continue
        game_name = str(alinks[2].text).strip()
        _, num, msg = baseRep.search_no_page({"name": game_name})
        if num == 0:
            page_detail(link, url)
            sec = random.randint(1, 3)
            logger.info(f"等待{sec}秒")
            time.sleep(sec)
        links.append(link)
    return list(set(links))


def page_detail(url: str, prev_url: str):
    """Scrape one forum thread: extract title, first pan link and extraction code.

    Finds the first anchor pointing at pan.baidu.com or pan.quark.cn, pulls
    the extraction code (the text right after "提取码:") out of the raw page,
    and inserts a ``need_save`` record into the repository if the thread
    title is not already stored.

    :param url: thread URL, absolute or site-relative.
    :param prev_url: referring listing page URL (currently unused; kept for
        interface compatibility).
    """
    logger.info(f"当前爬取url:{url}")
    url = url.lower()
    if not url.startswith("https://"):
        url = "https://bbs.3dmgame.com/" + url
    headers = {
        'authority': 'bbs.3dmgame.com',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
        'cache-control': 'max-age=0',
        # Cookies are passed via the module-level `cookies` dict instead.
        'referer': 'https://bbs.3dmgame.com/connect.php?receive=yes&mod=login&op=callback&referer=forum.php&code=0BE178BE0F19EC49053EAC5E30D0ED72&state=a76aeca2bfbc1f706aeab365f2e50b1d',
        'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'document',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-site': 'same-origin',
        'sec-fetch-user': '?1',
        'upgrade-insecure-requests': '1',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.35',
    }

    response = requests.get(url, headers=headers, cookies=cookies, timeout=15)
    # Fixed: "gbk2312" is not a valid codec name; the forum serves GB2312.
    response.encoding = "gb2312"
    logger.debug(response.status_code)  # was a bare print()
    soup = BeautifulSoup(response.text, 'html5lib')
    name = soup.select_one("#thread_subject").text
    link = ""
    for alink in soup.find_all("a"):
        href = str(alink.get("href")).lower()
        if href.startswith(("https://pan.baidu.com/", "https://pan.quark.cn/")):
            link = str(alink.get("href"))  # keep original casing
            break
    logger.debug(link)  # was a bare print()
    if link == "":
        return
    text = response.text
    try:
        pan_code = ""
        if "提取码:" in text:
            position = text.index("提取码:")
            # Assumes the code is within the 5 characters after "提取码:".
            pan_code = text[position:position + 9].replace("提取码:", "").strip()
        game = {
            "name": name,
            "pan_link": link,
            "pan_code": pan_code,
            "status": "need_save"
        }
        logger.info("当前游戏为:" + json.dumps(game, ensure_ascii=False))
        _, num, msg = baseRep.search_no_page({"name": name})
        if num == 0:
            logger.info("插入数据库")
            baseRep.insert_one(game)
        else:
            logger.info("已经存在,不用插入数据库")
    except Exception:
        # Fixed: was print(str(e)) — keep the best-effort behavior but log
        # the full traceback so failures are diagnosable.
        logger.exception("page_detail failed for %s", url)
    # sys.exit(0)


if __name__ == "__main__":
    # Scrape forum listing pages 1 through 9.
    for p in range(1, 10):
        # Fixed: was logging.info(...) on the unconfigured root logger, whose
        # default WARNING level silently dropped these messages. Use the
        # module logger, which has file + console handlers attached.
        logger.info(f"开始爬取第{p}页")
        get_page(p)
