import asyncio
import json

import aiohttp
import requests
from functional import seq
from urllib import parse
import os
import re


def get_page_source(url, timeout=30):
    """
    Fetch a page and return its body decoded as UTF-8 text.

    :param url: absolute URL to request
    :param timeout: seconds before the request is aborted (prevents an
        indefinite hang on a stalled connection)
    :return: response body as a str
    :raises requests.RequestException: on connection failure or timeout
    """
    # Session cookie + browser-like headers required by the target site's
    # anti-bot checks; the Cookie value is site/session specific.
    headers = {
        'Cookie': 'PHPSESSID=nfk8h61i6mepil534v5qush29l; 6688_3584_122.4.102.120=1; BT_auth=a69fo9ZdBzgsXXUvw4bc_BQkvevRhY6-BBfsV2fHU8TUbsnKMTQFzKhjKKKeC94r22JLV2rUWM0-3O4ReOvngz83S-T1PVIbobEERSmpJkr3_uOayK4TZN-MAk5aHv-wz1LfdlYCwbRQlj-98cCgEqIfRBpajRyEyyaM7Wh4w6JWy9ko7Q; BT_cookietime=019eAkCtCWjBUqYw4ccY8DRK7a_W9Sq_29i0sX2FdE-xeeQvJzh7; 6688_3690_122.4.102.120=1; 6688_3686_122.4.102.120=1; 6688_3588_122.4.102.120=1; vrg_go=1; 6688_3694_122.4.102.120=1; vrg_sc=ebb6e7103de78fb64249ce4303228d32; beitouviews_6688=kWwJFtCfvS%252BVQe3B5z4igeWjPm1rNBW6cxgd34g3WHaUgakgkLEmboxNh14u8W%252BDpBuRKl%252Fm1y42SSkucR1PIz3ksdA46nj5bpCrKbJQTPHR31Q05ytbDGmbXaGUau4hHURzTrj8r340Npa9Oal5Qy1%252Brlk8uQVNWPa7oIZTn5tugYjavd8%252FKGJtv7WQPx%252FzFFvEQ5zdV8rRYflYsPRPFyU%252B16koBf4jGEX9npjxL2Lmw2I16%252Fin2dgYLfordEPNdgwZIdS8qDTKyjh5I66pepFb6C8iVhFPFyZHdafZM0zhCLMSVTfI%252FMEVrllL7kOVRXHeCKmSeUVd2CvTjDvU%252Bg%253D%253D',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
        'Referer': 'https://www.btnull.si/mv/88VX.html',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7'
    }
    # timeout was missing in the original: a dead connection would block forever.
    response = requests.get(url=url, headers=headers, timeout=timeout)
    # Force UTF-8 so .text decodes correctly regardless of the response header.
    response.encoding = 'utf-8'
    return response.text


def get_page_source_content(url, timeout=30):
    """
    Fetch a page and return its raw body bytes (undecoded).

    :param url: absolute URL to request
    :param timeout: seconds before the request is aborted (prevents an
        indefinite hang on a stalled connection)
    :return: response body as bytes
    :raises requests.RequestException: on connection failure or timeout
    """
    # Session cookie + browser-like headers required by the target site's
    # anti-bot checks; the Cookie value is site/session specific.
    headers = {
        'Cookie': 'PHPSESSID=nfk8h61i6mepil534v5qush29l; 6688_3584_122.4.102.120=1; BT_auth=a69fo9ZdBzgsXXUvw4bc_BQkvevRhY6-BBfsV2fHU8TUbsnKMTQFzKhjKKKeC94r22JLV2rUWM0-3O4ReOvngz83S-T1PVIbobEERSmpJkr3_uOayK4TZN-MAk5aHv-wz1LfdlYCwbRQlj-98cCgEqIfRBpajRyEyyaM7Wh4w6JWy9ko7Q; BT_cookietime=019eAkCtCWjBUqYw4ccY8DRK7a_W9Sq_29i0sX2FdE-xeeQvJzh7; 6688_3690_122.4.102.120=1; 6688_3686_122.4.102.120=1; 6688_3588_122.4.102.120=1; vrg_go=1; 6688_3694_122.4.102.120=1; vrg_sc=ebb6e7103de78fb64249ce4303228d32; beitouviews_6688=kWwJFtCfvS%252BVQe3B5z4igeWjPm1rNBW6cxgd34g3WHaUgakgkLEmboxNh14u8W%252BDpBuRKl%252Fm1y42SSkucR1PIz3ksdA46nj5bpCrKbJQTPHR31Q05ytbDGmbXaGUau4hHURzTrj8r340Npa9Oal5Qy1%252Brlk8uQVNWPa7oIZTn5tugYjavd8%252FKGJtv7WQPx%252FzFFvEQ5zdV8rRYflYsPRPFyU%252B16koBf4jGEX9npjxL2Lmw2I16%252Fin2dgYLfordEPNdgwZIdS8qDTKyjh5I66pepFb6C8iVhFPFyZHdafZM0zhCLMSVTfI%252FMEVrllL7kOVRXHeCKmSeUVd2CvTjDvU%252Bg%253D%253D',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
        'Referer': 'https://www.btnull.si/mv/88VX.html',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7'
    }
    # timeout was missing in the original: a dead connection would block forever.
    response = requests.get(url=url, headers=headers, timeout=timeout)
    # NOTE: the original also set response.encoding here, but encoding only
    # affects .text decoding, never .content, so it was dead code and is removed.
    return response.content


def get_mix_m3u8(url):
    """
    Resolve the final (mixed) m3u8 playlist URL for a video page.

    Steps:
      1. Fetch the player page/JS and extract the first-level playlist URL
         from a `"url":"...","select"` fragment (JSON-escaped slashes stripped).
      2. Fetch that master playlist and pick the variant following the
         `RESOLUTION=1080x608` line.

    :param url: player page URL (the `/py/..._1.html` endpoint)
    :return: absolute URL of the media playlist
    :raises ValueError: if either pattern is not found (site layout changed)
    """
    m3u8_index = get_page_source(url)

    # Extract the playlist URL embedded in the JS payload.
    # re.search + explicit check instead of next(finditer(...)): the original
    # raised a bare StopIteration when the pattern was missing, which is
    # confusing to debug and illegal inside generators (PEP 479).
    url_match = re.search(r'"url":"(?P<Url>.*?)","select"', m3u8_index)
    if url_match is None:
        raise ValueError(f'playlist URL not found in page source of {url}')
    # The URL is JSON-escaped ("https:\/\/..."), so drop the backslashes.
    playlist_url = url_match.group('Url').replace('\\', '')

    m3u8_mix = get_page_source(playlist_url)
    # The media playlist URI is on the line after the matching RESOLUTION tag.
    mix_match = re.search(r'RESOLUTION=1080x608\n(?P<mix_url>.*)', m3u8_mix, re.MULTILINE)
    if mix_match is None:
        raise ValueError(f'1080x608 variant not found in master playlist {playlist_url}')
    # The playlist entry may be relative; resolve it against the master URL.
    return parse.urljoin(playlist_url, mix_match.group('mix_url').strip())


def get_video_url_list(url):
    """
    Download the media playlist, collect all .ts segment URLs, and persist
    them to ./mix/mix.json as {"url": [...]} for the async downloader.

    :param url: absolute URL of the media (.m3u8) playlist
    :return: None (writes ./mix/mix.json as a side effect)
    """
    page = get_page_source(url)

    # Whitespace-split the playlist, keep only segment lines, and resolve
    # relative segment paths against the playlist URL.
    # Plain comprehension replaces the third-party `seq` pipeline.
    url_list = [
        parse.urljoin(url, token)
        for token in page.split()
        if token.endswith('.ts')
    ]

    path = './mix'
    # exist_ok avoids the race/failure the original exists()-then-makedirs
    # check had when the path already existed.
    os.makedirs(path, exist_ok=True)
    with open(f'{path}/mix.json', encoding='utf-8', mode='w') as fl:
        fl.write(json.dumps({'url': url_list}))


async def down_video_data(url):
    """
    Download one .ts segment to ./video/callme/<basename>, retrying up to
    10 times with linear backoff (5s, 10s, ... 50s) on any failure.

    Best-effort: after all retries fail the segment is skipped (logged),
    matching the original behavior of not aborting the whole batch.

    :param url: absolute URL of the segment
    """
    path = './video/callme'
    # exist_ok avoids the race/failure the original exists()-then-makedirs
    # check had when the path already existed.
    os.makedirs(path, exist_ok=True)
    file_name = url.split('/')[-1]
    for i in range(10):
        try:
            async with aiohttp.ClientSession() as session:
                # ssl=False: the site serves segments with a cert aiohttp
                # rejects; verification is deliberately disabled here.
                async with session.get(url, ssl=False, timeout=30) as response:
                    binary = await response.content.read()
                    with open(f'{path}/{file_name}', mode='wb') as fl:
                        fl.write(binary)
                        print(f'{file_name}下载完成')
            print(f'{file_name}下载成功')
            break
        except Exception:
            print(f'{file_name}下载失败')
            # Linear backoff before the next attempt.
            await asyncio.sleep((i + 1) * 5)
    else:
        # All 10 attempts failed — make the permanent loss visible instead
        # of silently producing an incomplete final video.
        print(f'{file_name}重试次数用尽，已跳过')


async def get_video_data():
    """
    Read the segment URL list from ./mix/mix.json and download all segments
    concurrently via down_video_data.

    :raises FileNotFoundError: if ./mix/mix.json has not been generated yet
    """
    with open('./mix/mix.json', mode='r') as fl:
        url_list = json.loads(fl.read())['url']
    # gather() replaces asyncio.wait(): wait() raises ValueError on an empty
    # task list (possible if the playlist had no .ts entries) and is the
    # legacy API; gather handles zero tasks cleanly.
    await asyncio.gather(*(down_video_data(url) for url in url_list))


def union_data():
    """
    Concatenate all downloaded segments (in sorted filename order) into a
    single file ./video/callme/full/callme_video.ts by binary append.

    NOTE(review): lexical sort assumes segment filenames compare in playback
    order (e.g. zero-padded numbers) — confirm against the actual playlist.
    """
    with open('./mix/mix.json', encoding='utf-8', mode='r') as fl:
        url_list = json.loads(fl.read())['url']
    # Reduce each URL to its local segment filename.
    file_names = sorted(url.split('/')[-1].strip() for url in url_list)

    out_dir = './video/callme/full'
    # Bug fix: the original never created this directory, so the open()
    # below failed with FileNotFoundError on a fresh run.
    os.makedirs(out_dir, exist_ok=True)

    # 'ab+' preserved from the original: re-running appends rather than
    # truncating (delete the output first for a clean rebuild).
    with open(f'{out_dir}/callme_video.ts', 'ab+') as fl:
        for item in file_names:
            with open(f'./video/callme/{item}', 'rb') as fr:
                fl.write(fr.read())


def per_100_combine(batch_size=100):
    """
    Merge the downloaded segments with Windows `copy /b`: first into
    numbered batch files (1.ts, 2.ts, ...), then into callme.mp4.

    Windows-only (relies on the `copy` shell builtin).

    Bug fixes vs. the original:
      * `range(len(dir_list) - 1)` silently dropped the last segment, and
        the `i % 100` condition put 101 files in the first batch — replaced
        with exact slicing so every file is included in batches of
        `batch_size`.
      * The second `os.chdir('./video/callme')` was relative to the
        already-changed cwd and raised FileNotFoundError — removed.
      * The final merge hard-coded 20 batches (`range(1, 21)`); it now uses
        the actual batch count.

    :param batch_size: number of segment files per intermediate batch
        (default 100, matching the original intent)
    """
    original_cwd = os.getcwd()
    os.chdir('./video/callme')
    try:
        segments = os.listdir()
        batch_count = 0
        # Exact slicing: every file lands in exactly one batch.
        for start in range(0, len(segments), batch_size):
            batch_count += 1
            names = "+".join(segments[start:start + batch_size])
            os.system(f'copy /b {names} {batch_count}.ts')

        # Final combine of all intermediate batches.
        final_list = [f'{i}.ts' for i in range(1, batch_count + 1)]
        name = "+".join(final_list)
        os.system(f'copy /b {name} callme.mp4')
    finally:
        # Always restore the working directory for the caller.
        os.chdir(original_cwd)


def main():
    """
    Full pipeline: resolve the m3u8 playlist, dump the segment URL list,
    download all segments concurrently, then merge them.
    """
    mix_url = get_mix_m3u8(url='https://www.btnull.si/py/EYzXa_1.html')
    get_video_url_list(url=mix_url)
    # asyncio.run() replaces the deprecated get_event_loop()/
    # run_until_complete() pattern and guarantees loop cleanup.
    asyncio.run(get_video_data())
    # Merge strategy 1: binary concatenation in Python.
    union_data()
    # Merge strategy 2: OS-level `copy /b` (Windows).
    per_100_combine()


# Standard entry-point guard: run the pipeline only when executed as a script.
if __name__ == '__main__':
    main()
