# -*- coding: utf-8 -*-
"""
@Time : 2024/5/27 14:32
@Author : ChenXiaoliang
@Email : middlegod@sina.com
@File : get_mblog.py
"""
import sys

import requests
import json
import time
import sys

from get_userid_by_name import get_userid_by_name


def get_mblog(uid, top_n):
    """
    Fetch a user's Weibo posts, page by page, via the mymblog AJAX endpoint.

    :param uid: numeric Weibo user id
    :param top_n: number of result pages to fetch (1 request per page)
    :return: list of dicts with keys "text", "address_site", "publish_time"
    """
    # Collected posts.
    target = []

    # Nothing to do for zero/negative page counts (also avoids a
    # ZeroDivisionError in the progress-bar percentage below).
    if top_n <= 0:
        return target

    # Scrape start time, used for the elapsed-seconds readout.
    t_start = time.time()

    # Headers are loop-invariant: build them once, not per request.
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
        "Cookie": "SUB=_2AkMRUOPezr3SUzDg2yl"}

    # Initial progress bar (0%).
    print("\r{:.2f}%[{}->{}]{:.2f}s".format(
        100 * (0 / top_n), '', '.' * top_n, time.time() - t_start), end="")

    for i in range(1, top_n + 1):
        url = f"https://weibo.com/ajax/statuses/mymblog?uid={uid}&page={i}&feature=0"
        # timeout so a stalled connection cannot hang the scrape forever
        r = requests.get(url, headers=headers, timeout=30)
        try:
            data = json.loads(r.text)
        except Exception as e:
            # Weibo returns an HTML login page (not JSON) when the Cookie
            # has expired, so a parse failure means the SUB value is stale.
            sys.exit(f"发生异常{e.__class__.__name__}，退出执行，需要替换请求headers中Cookie的SUB值"
                     f"SUB位置：浏览器F12打开开发者工具，应用-存储-Cookie，找Domain域名是.weibo.com的名称SUB的值替换。")

        # Guard against a JSON payload with a missing/None "data" or "list"
        # (e.g. rate limiting) instead of crashing with AttributeError.
        blogs = (data.get("data") or {}).get("list") or []

        for blog in blogs:
            target.append({"text": blog.get("text_raw"),
                           "address_site": blog.get("region_name"),
                           "publish_time": blog.get("created_at")})

        # Polite per-page delay (crude rate limiting), then progress bar.
        time.sleep(1)
        print("\r{:.2f}%[{}->{}]{:.2f}s".format(
            100 * (i / top_n), '=' * i, '.' * (top_n - i), time.time() - t_start), end="")

    return target


if __name__ == '__main__':
    # Interactive entry point: resolve the user name to a uid, scrape the
    # requested number of pages, and dump the result to a timestamped file.
    search_name = input("爬取用户名> ")
    uid = get_userid_by_name(search_name)
    page_cnt = int(input("爬取分页数> "))
    blogs_data = get_mblog(uid, page_cnt)

    # Timestamp in the filename keeps each run's output separate.
    format_time = time.strftime("%Y%m%d%H%M%S", time.localtime())
    # Open with 'w', not 'a+': appending a second JSON document to an
    # existing file would make it unparseable.
    with open(f'{search_name}_blogs_{format_time}.json', 'w', encoding='utf-8') as f:
        json.dump({search_name: blogs_data}, f, ensure_ascii=False, indent=4)
