"""
爬虫思路
（一）数据来源分析
    1、需求分析
        1.1 获取快手热榜的所有视频中的
            - 视频URL
            - 视频标题
            - 分类
            - 评论
            - 点赞数
            - 播放量
    2、接口分析
        2.1 确定是动态数据还是静态数据
            - 在源代码中搜索关键字，如果有  ==>  静态的；反之动态的。


（二）爬虫代码实现
    1、发送请求
    2、获取数据
    3、解析数据
    4、保存数据
        - 保存为CSV文件
            导入： import csv  自带
            读取：
            写入：
"""
import csv
import os.path

import httpx
import json
import re


def get_rank(url, headers, cookies, params):
    """Fetch the hot-rank page and return its HTML body as text.

    Args:
        url: Page URL to request.
        headers: HTTP request headers.
        cookies: Session cookies for the request.
        params: Query-string parameters.

    Returns:
        str: The decoded response body.
    """
    client = httpx.Client()
    try:
        resp = client.get(url, headers=headers, cookies=cookies, params=params)
        return resp.text
    finally:
        # Explicit close mirrors what the context manager would do.
        client.close()


def parse_rank(html):
    """Extract hot-rank entries from the page's embedded Apollo state.

    The Kuaishou homepage bootstraps its data through an inline
    ``window.__APOLLO_STATE__ = {...}`` script; the rank items live in
    the ``defaultClient`` cache under ``VisionHotRankItem:*`` keys.

    Args:
        html: Raw HTML of the homepage.

    Returns:
        list[dict]: One dict per rank item with keys
        ``rank``, ``name`` and ``hotValue``.

    Raises:
        ValueError: If the Apollo-state script cannot be located
            (e.g. the page layout changed or the request was blocked).
    """
    match = re.search(
        r'<script>window.__APOLLO_STATE__=(.*?);\(function\(\).*;</script>',
        html,
        re.S,
    )
    if match is None:
        # Fail loudly with a clear message instead of the opaque
        # IndexError that re.findall(...)[0] used to raise.
        raise ValueError('APOLLO_STATE script not found in page HTML')

    state = json.loads(match.group(1))['defaultClient']

    # Only the "VisionHotRankItem..." cache entries describe rank items;
    # other keys ($ROOT_QUERY etc.) are skipped.
    return [
        {'rank': item['rank'], 'name': item['name'], 'hotValue': item['hotValue']}
        for key, item in state.items()
        if 'VisionHotRankItem' in key
    ]


def save_data(data):
    """Write hot-rank rows to ``./数据/榜单.csv``, overwriting any previous file.

    Args:
        data: List of dicts, each with keys ``rank``, ``name`` and
            ``hotValue`` (as produced by ``parse_rank``).
    """
    # makedirs(exist_ok=True) avoids the check-then-create race of the
    # original os.path.exists() + os.mkdir() pair.
    os.makedirs('./数据', exist_ok=True)

    # utf-8-sig writes a BOM so Excel auto-detects the encoding;
    # newline='' lets the csv module control line endings, as the
    # csv documentation requires.
    with open('./数据/榜单.csv', 'w', encoding='utf-8-sig', newline='') as f:
        fieldnames = ['rank', 'name', 'hotValue']
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(data)


def main():
    """Scrape the Kuaishou hot rank and dump it to a CSV file."""
    url = "https://www.kuaishou.com/"
    params = {
        "isHome": "1"
    }
    # NOTE(review): these cookie values are hard-coded session credentials;
    # they expire over time and should not live in source control.
    cookies = {
        "kpf": "PC_WEB",
        "clientid": "3",
        "did": "web_842a80c5884396e5c40f99a0b3366ad3",
        "userId": "3498836671",
        "kpn": "KUAISHOU_VISION",
        "kuaishou.server.web_st": "ChZrdWFpc2hvdS5zZXJ2ZXIud2ViLnN0EqABGG7S_A4gdckkMPiIxTSl2jnPe0mXDySQgh5iYibmbV6STxa5qlozXiDFa3yXG6rRBskeBXdag-ZCGf3mO3tMnm-EonekX2rp_z3GFrS7ojPZ0v3b8FQC2CcBEspMQ1Q3XpkfM0-LGzARG3IQH21HcUXvNLH5fUhspIti_LdYeOtiKb8qv2wwY4x4nBmIwUpqyeYxVGO12nf7vsgBLReR2RoS6uws2LN-siMyPVYdMaXTUH7FIiDBrMD560JUnPveMqddHJ0ipKvr9d1HWBYfYfN4KbrfkCgFMAE",
        "kuaishou.server.web_ph": "43c7a68c49f94f8254db714ff711594d2150"
    }
    # Browser-like headers so the request is not rejected as a bot.
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Referer": "https://www.kuaishou.com/new-reco",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-User": "?1",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36 Edg/113.0.1774.42",
        "sec-ch-ua": "\"Microsoft Edge\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\""
    }
    # Fetch -> parse -> persist.
    page_html = get_rank(url, headers, cookies, params)
    rank_rows = parse_rank(page_html)
    save_data(rank_rows)


if __name__ == '__main__':
    main()
