import asyncio
import re
import time
import requests
from pyppeteer import launch
import traceback
import config
from config import mysql_conn
from items import VideoInfo

# Douyin user share-page URLs to scrape. The `sec_uid` query parameter is what
# identifies the account; the remaining parameters (timestamp, utm_*) are
# share-tracking noise copied from the mobile app's "copy link" output.
HOMEPAGE_URL = [
    'https://www.iesdouyin.com/share/user/57794230165?sec_uid=MS4wLjABAAAA1lmDoWWKqZxsiqX35kPeVW1NaGMHgwKnQoNF1bytYAs&timestamp=1599954583&utm_source=copy&utm_campaign=client_share&utm_medium=android&share_app_name=douyin',
    'https://www.iesdouyin.com/share/user/86954622963?sec_uid=MS4wLjABAAAAmeYWdy3q0Ej-GfCMfpAgGLhfY2olMYihzFKvapNaSmk&timestamp=1599954628&utm_source=copy&utm_campaign=client_share&utm_medium=android&share_app_name=douyin',
    'https://www.iesdouyin.com/share/user/86643041715?sec_uid=MS4wLjABAAAAHiWZg9-ZmFh3IEsLC7BjfYTTqI-x7JpUGzDcIJ4yjNA&timestamp=1599954669&utm_source=copy&utm_campaign=client_share&utm_medium=android&share_app_name=douyin',
]


class DouyinScrawl:
    """Scrape a Douyin user's video list starting from their share-page URL.

    Workflow: a headless Chromium (pyppeteer) loads the share page with
    request interception enabled to capture the *signed* video-list API URL
    (the `_signature` parameter cannot be computed offline), then plain
    ``requests`` pages through ``/web/api/v2/aweme/post/`` using the
    captured query parameters.
    """

    def __init__(self, webpage_url):
        # Share-page URL of the user whose videos are scraped.
        self.webpage_url = webpage_url
        # UA provider from project config; `.random` yields a fresh
        # user-agent string per access — presumably fake-useragent-like,
        # TODO confirm against config module.
        self.ua = config.UA
        self.headers = {
            'authority': 'www.iesdouyin.com',
            'accept': 'application/json',
            'user-agent': self.ua.random,
            'x-requested-with': 'XMLHttpRequest',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'accept-language': 'zh-CN,zh;q=0.9',
        }
        self.u_id = ''      # numeric user id parsed from the share URL
        self.info_url = ''  # signed API URL captured via request interception
        self.u_name = ''    # nickname scraped from the rendered page

    async def intercept_request(self, req):
        """Request-interception hook: record the user id and the signed API URL.

        Every request is allowed to continue; we only observe.
        """
        if 'share/user' in req.url:
            # Raw string fixes the invalid '\?' escape of the original, and
            # re.search guards against a URL that contains 'share/user' but
            # does not match the pattern (the original indexed findall()[0]
            # blindly and could IndexError inside the event loop).
            match = re.search(r'user/(.*?)\?sec_uid', req.url)
            if match:
                self.u_id = match.group(1)
        if 'signature' in req.url:
            self.info_url = req.url
        await req.continue_()

    async def break_list(self, **kwargs):
        """Fetch one page of the user's video list.

        Keyword args:
            page: 1-based page number; pages > 1 skip re-extraction of state
                  that was captured on page 1.
            max_cursor: pagination cursor returned by the previous page.

        Returns:
            (results, has_more, nextparams) — list of video dicts, whether
            another page exists, and the cursor dict for the next call.
        """
        kwargs['page'] = int(kwargs.pop('page', 0))
        # Launching the browser on every call is expensive but keeps the
        # `_signature` fresh; it also (re)fills self.u_id / self.u_name.
        params_list = await self.get_params_list()
        params = {
            "sec_uid": params_list['sec_uid'],
            "count": params_list['count'],
            # `or 0` tolerates a None cursor handed back by a previous page.
            "max_cursor": int(kwargs.get('max_cursor') or 0),
            "aid": params_list['aid'],
            "_signature": params_list['_signature'],
            "dytk": params_list['dytk'],
        }
        if kwargs['page'] > 1:
            return await self.parser_next_page(parse_params=params)
        response_json, has_more = await self.get_video_list(params)

        # Fall back to the cursor we sent when the response omits one; the
        # original stored .get('max_cursor') unconditionally, which could
        # propagate None and crash int() on the next call.
        params['max_cursor'] = response_json.get('max_cursor', params['max_cursor'])

        nextparams = {
            'max_cursor': params['max_cursor']
        }
        results = self.extract_api(response=response_json, u_code=self.u_id, u_name=self.u_name)
        return results, has_more, nextparams

    async def get_params_list(self):
        """Load the share page headlessly and return the signed API params.

        Returns:
            dict of query parameters (sec_uid, count, aid, _signature, dytk,
            ...) parsed from the intercepted video-list API URL. Also fills
            ``self.u_name`` from the rendered page.

        Raises:
            RuntimeError: if no signed API request was observed.
        """
        browser = await launch(
            headless=True,
            args=['--no-sandbox'],
            userDataDir='./',
            executablePath=r"C:\Users\lh\Documents\chrome-win\chrome.exe"
        )
        try:
            page = await browser.newPage()
            await page.setRequestInterception(True)
            page.on('request', self.intercept_request)
            # Hide navigator.webdriver *before* the target page's scripts
            # run. The original used page.evaluate() here, which only runs
            # once on the current (blank) document and leaves the flag
            # visible to the site after navigation.
            await page.evaluateOnNewDocument("""
                () => {
                    Object.defineProperties(navigator, {
                        webdriver: {
                            get: () => false
                        }
                    })
                }
            """)
            await page.goto(
                self.webpage_url,
                options={
                    "waitUntil": "networkidle0"
                }
            )
            if not self.info_url:
                raise RuntimeError('signed video-list API URL was never intercepted')
            # Turn "https://...?k1=v1&k2=v2" into {'k1': 'v1', 'k2': 'v2'}.
            # maxsplit=1 keeps values that themselves contain '=' intact
            # (the original's bare split("=") truncated them).
            query = self.info_url.split('?', 1)[1]
            params_list = dict(pair.split('=', 1) for pair in query.split('&'))
            nickname = re.findall(r'class="nickname">(.*?)</p>', await page.content())
            # Empty name instead of IndexError when the markup changes.
            self.u_name = nickname[0] if nickname else ''
        finally:
            await browser.close()
        return params_list

    async def parser_next_page(self, parse_params):
        """Fetch a follow-up page using the already-captured signing params."""
        u_code = self.u_id
        # Rotate the UA per request to look less like a bot.
        self.headers['user-agent'] = self.ua.random

        response_json, has_more = await self.get_video_list(parse_params)

        nextparams = {
            'max_cursor': response_json.get("max_cursor", parse_params['max_cursor'])
        }

        results = self.extract_api(
            response=response_json,
            u_code=u_code,
            u_name=self.u_name
        )

        # The conditional binds to nextparams only: the tuple is
        # (results, has_more, nextparams-or-empty-dict).
        return results, has_more, nextparams if has_more else {}

    async def get_video_list(self, params, timeout=20):
        """Poll the video-list API until it returns a usable page.

        The endpoint frequently answers with an empty placeholder payload,
        so we retry until ``aweme_list`` carries content — but give up after
        ``timeout`` seconds. The original had this guard commented out and
        could spin forever (e.g. on a user's legitimate last page).

        Returns:
            (response_json, has_more) — the last decoded payload (possibly
            empty on failure) and the API's has_more flag.
        """
        response_json = {}
        has_more = False
        deadline = time.time() + timeout
        # Equivalent to the original condition, simplified via De Morgan,
        # with `or []` guarding the TypeError the original hit when
        # 'aweme_list' was missing from a bad response.
        while not (has_more and len(response_json.get('aweme_list') or []) > 1):
            self.headers['user-agent'] = self.ua.random
            response = requests.get(
                url="https://www.iesdouyin.com/web/api/v2/aweme/post/",
                headers=self.headers,
                params=params
            )
            try:
                response_json = response.json()
                has_more = response_json.get('has_more', True)
            except ValueError as e:  # non-JSON body: blocked / rate-limited
                print(e)
                # Reset both so a stale has_more=True cannot escape with an
                # empty payload when the timeout fires below.
                response_json = {}
                has_more = False
            if time.time() >= deadline:
                break
        return response_json, has_more

    @staticmethod
    def extract_api(response, u_code, u_name):
        """Flatten an API payload into a list of video-record dicts.

        Each record: vid, duration (seconds), created_at (unix ts, 0 when
        unknown), vurl (reconstructed share link), vname (title), uname.
        Entries without an aweme_id are skipped.
        """
        results = []
        # `or []` tolerates a missing aweme_list (timed-out/blocked reply),
        # where the original raised KeyError.
        for video in filter(lambda x: x.get("aweme_id"), response.get('aweme_list') or []):
            # The dynamic-cover URI embeds the 10-digit upload timestamp.
            upload_ts = re.findall(r"1\d{9}", video['video']['dynamic_cover']['uri'])
            results.append(
                {
                    "vid": video['aweme_id'],
                    "duration": video['video']['duration'] // 1000,  # ms -> s
                    # int() keeps the field a consistent type; the original
                    # mixed str (found) with int 0 (not found).
                    "created_at": int(upload_ts[0]) if upload_ts else 0,
                    "vurl": f"https://www.iesdouyin.com/share/video/{video['aweme_id']}/?"
                            f"region=CN&"
                            f"mid={video['aweme_id']}&"
                            f"u_code={u_code}&"
                            f"titleType=title&"
                            f"timestamp={int(time.time())}&"
                            f"utm_campaign=client_share&"
                            f"app=aweme&"
                            f"utm_medium=ios&"
                            f"tt_from=copy&"
                            f"utm_source=copy",
                    "vname": video['desc'],
                    "uname": u_name,
                }
            )
        return results


if __name__ == '__main__':
    # For each configured user: fetch pages until the API says there are no
    # more, inserting each video record into MySQL.
    for homepage_url in HOMEPAGE_URL:
        douyin_scrawl = DouyinScrawl(homepage_url)
        print(f'正在获取HOMEPAGE:\t{homepage_url.split("?")[0]}')
        page, has_next, max_cursor = 1, True, 0
        consecutive_errors = 0
        while has_next:
            try:
                result = asyncio.get_event_loop().run_until_complete(
                    (douyin_scrawl.break_list(page=page, max_cursor=max_cursor)))
                has_next = result[1]
                # `or 0` guards against a missing/None cursor, which would
                # otherwise crash int() inside break_list next iteration.
                max_cursor = result[2].get('max_cursor') or 0
                print(f'获取HOMEPAGE第{page}页成功:\t{homepage_url.split("?")[0]}! max_cursor:{max_cursor} result:{result}')
                for item in result[0]:
                    try:
                        mysql_conn.add(VideoInfo(**item))
                        mysql_conn.commit()
                        print(f'数据:\t{item}已入库.')
                    except Exception as e:
                        mysql_conn.rollback()
                        # Log instead of silently swallowing the DB error.
                        print(f'入库失败:\t{e}')
                page += 1
                consecutive_errors = 0
                # NOTE: the original had an unconditional `break` here, which
                # made the whole pagination loop dead code (only page 1 was
                # ever fetched). Removed so has_next actually drives the loop.
            except Exception as e:
                # print_exc shows the exception traceback; the original's
                # print_stack only showed the call stack.
                traceback.print_exc()
                print(f'网络请求错误:\t{e}')
                consecutive_errors += 1
                if consecutive_errors >= 3:
                    # The original retried forever on a persistent failure.
                    break