#!/usr/bin/env python
# -*- coding:utf-8 -*-

"""
@author zyx
@since 2022/2/18 08:38
@file: c01_美剧网爬取.py
"""

import requests
from urllib.parse import urljoin
import re
import os
# Requires: pip install pycryptodome
from Crypto.Cipher import AES

# Directory that receives the decrypted .ts segments.
dirName = 'tsLib'
# exist_ok avoids the check-then-create race of os.path.exists + os.mkdir.
os.makedirs(dirName, exist_ok=True)

headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/98.0.4758.102 Safari/537.36'
}
# First-level m3u8 playlist URL.
m1_url = "https://vod11.bdzybf.com/20220127/5iTXjEev/index.m3u8"
# ============================= NOTE: this link is dead (╥╯^╰╥)

# Without a timeout, requests.get can block forever on a stalled server.
REQUEST_TIMEOUT = 30


def _fetch_text(url):
    """GET *url* and return the response body as text with surrounding whitespace stripped.

    Raises requests.HTTPError on a non-2xx status so a 404 page is never
    mistaken for playlist content.
    """
    resp = requests.get(url=url, headers=headers, timeout=REQUEST_TIMEOUT)
    resp.raise_for_status()
    return resp.text.strip()


def _fetch_bytes(url):
    """GET *url* and return the raw response body (used for the AES key and .ts data)."""
    resp = requests.get(url=url, headers=headers, timeout=REQUEST_TIMEOUT)
    resp.raise_for_status()
    return resp.content


def _media_urls(playlist_text, base_url):
    """Return the absolute URL of every media line in an m3u8 playlist.

    Media lines are the non-empty lines that do not start with '#'
    (the '#' lines are EXTM3U tags/comments). Relative entries are
    resolved against *base_url*. The emptiness check matters: a blank
    line would otherwise urljoin() back to the playlist itself.
    """
    return [
        urljoin(base_url, line)
        for line in playlist_text.split('\n')
        if line and not line.startswith('#')
    ]


def _fetch_key(playlist_text, base_url):
    """Extract the EXT-X-KEY URI from *playlist_text* and download the key.

    Returns the key as bytes, which is what AES.new expects.
    """
    key_url = urljoin(base_url, re.findall('URI="(.*?)"', playlist_text, re.S)[0])
    return _fetch_bytes(key_url)


def main():
    """Download, decrypt (AES-128-CBC) and save every .ts segment of the video."""
    # The first-level playlist points at the second-level (media) playlist.
    m1_page_text = _fetch_text(m1_url)
    print(m1_page_text)
    m2_url = _media_urls(m1_page_text, m1_url)[0]

    # The second-level playlist lists the key URI and every .ts segment.
    m2_page_text = _fetch_text(m2_url)

    key = _fetch_key(m2_page_text, m1_url)
    # NOTE(review): IV is assumed to be all-ASCII-zero bytes because the
    # playlist supplies none — confirm against an #EXT-X-KEY IV= attribute.
    iv = b"0000000000000000"

    for ts_url in _media_urls(m2_page_text, m1_url):
        ts_data = _fetch_bytes(ts_url)
        # A fresh cipher per segment: each .ts file is encrypted independently,
        # and a CBC cipher object is stateful across decrypt() calls.
        aes = AES.new(key=key, mode=AES.MODE_CBC, iv=iv)
        desc_data = aes.decrypt(ts_data)
        ts_name = ts_url.split('/')[-1]
        with open(dirName + '/' + ts_name, 'wb') as fp:
            fp.write(desc_data)
        print(ts_name, '下载保存成功！')

    # Merging the .ts files is best done with a dedicated tool;
    # hand-rolled concatenation is error-prone.


if __name__ == '__main__':
    main()