# 原理：通过第三方 IP 发送请求
# 原：a -> b
# 代理： a -> c1 -> b
#       a -> c2 -> b
#       a -> c3 -> b
#       a -> c4 -> b
#       a -> c5 -> b

# 代理是灰色地带：慎用

import requests
import re
from lxml import etree

# Maximum number of attempts when retrying a request (currently unused here).
RETRY_COUNT = 5
# User space page: mainUrlPrefix + uid.
mainUrlPrefix = "https://space.bilibili.com/"
# Follower-count API; {} is filled with the uid.
followUrl = "https://api.bilibili.com/x/relation/stat?vmid={}&jsonp=jsonp"
# Total view/like stats API; {} is filled with the uid.
viewAndLikeUrl = "https://api.bilibili.com/x/space/upstat?mid={}&jsonp=jsonp"

# Shared request headers. "referer" is filled in per-request by realGetInfo()
# before calling the stat APIs (anti-hotlinking check on Bilibili's side).
headers = {
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36",
    "referer": ""
}

def get_proxy_count():
    """Return the proxy pool's count stats (parsed JSON) from the local service.

    Adds an explicit timeout so a dead pool service cannot hang the caller
    forever (requests has no default timeout).
    """
    return requests.get("http://127.0.0.1:5010/count/", timeout=10).json()

def get_all_proxy():
    """Return a list of all HTTPS proxy addresses ("ip:port") from the pool.

    Each element of the service's JSON response is a dict with a "proxy" key;
    entries missing that key yield None, matching the original behavior.
    """
    allProxy = requests.get("http://127.0.0.1:5010/all/?type=https", timeout=10).json()
    # Comprehension instead of a manual append loop (same result, idiomatic).
    return [proxy.get("proxy") for proxy in allProxy]

def get_random_proxy():
    """Return one random HTTPS proxy address ("ip:port") from the pool.

    Timeout added so a dead pool service cannot block indefinitely.
    """
    proxy = requests.get("http://127.0.0.1:5010/get/?type=https", timeout=10).json()
    return proxy.get("proxy")

def delete_proxy(proxy):
    """Remove the given proxy ("ip:port") from the local pool.

    Uses params= so the proxy value is URL-encoded properly instead of being
    spliced raw into the query string; also adds a timeout.
    """
    requests.get("http://127.0.0.1:5010/delete/", params={"proxy": proxy}, timeout=10)

def getInfo(uid):
    """Fetch profile info for the given uid, trying pooled proxies in turn.

    Stops at the first proxy that succeeds. Any proxy that fails (raises) is
    deleted from the local pool before moving on to the next one.
    """
    proxies = get_all_proxy()
    for proxy in proxies:
        try:
            # Try this proxy; realGetInfo returns True on success.
            success = realGetInfo(uid, proxy)
            if success:
                break
        except Exception as e:
            # Bug fix: the original did `print(Exception)`, which prints the
            # Exception *class*, not the error that occurred. Bind and print
            # the actual exception instance instead.
            print(e)
        # This proxy failed — remove it from the pool and try the next one.
        delete_proxy(proxy)


def realGetInfo(uid, proxy):
    """Scrape one user's space page and stat APIs through the given proxy.

    Extracts the display name and signature from the space page HTML, then
    queries the follower-count and view/like APIs. Values are currently only
    fetched, not returned (prints are commented out upstream of this review).

    Raises on network errors or if the page HTML does not match the regexes
    (AttributeError from .group on a failed search) — the caller treats any
    exception as "this proxy is bad".
    """
    url = mainUrlPrefix + uid
    # Hoist the invariant proxy config instead of rebuilding it per request.
    proxyCfg = {"https": "https://{}".format(proxy)}
    resp = requests.get(url, proxies=proxyCfg, timeout=15)
    resp.encoding = 'utf-8'
    html = resp.text
    nameRule = re.compile(r'<title>(?P<name>.*?)的个人空间', re.S)
    signRule = re.compile(r'第一时间了解UP注动态。(?P<sign>.*?)"/><meta', re.S)
    name = nameRule.search(html).group("name")
    sign = signRule.search(html).group("sign")

    # Set referer (anti-hotlinking) before hitting the stat APIs.
    headers["referer"] = url

    resp = requests.get(followUrl.format(uid), headers=headers, proxies=proxyCfg, timeout=15)
    follower = resp.json()["data"]["follower"]

    resp = requests.get(viewAndLikeUrl.format(uid), headers=headers, proxies=proxyCfg, timeout=15)
    # Parse the JSON body once instead of twice (original called resp.json()
    # for "view" and again for "likes").
    data = resp.json()["data"]
    view = data["archive"]["view"]
    likes = data["likes"]

    return True



if __name__ == '__main__':
    # Sample output of get_proxy_count():
    # {'count': 52, 'http_type': {'http': 47, 'https': 5}, 'source': {'freeProxy02': 2, 'freeProxy03': 15, 'freeProxy05': 1, 'freeProxy06': 25, 'freeProxy10': 10}}
    # print(get_proxy_count())

    # allProxy = get_all_proxy()
    # print(allProxy)
    #
    # randomProxy = get_random_proxy()
    # print(randomProxy)

    # Scrape one user's profile through the proxy pool.
    getInfo("408650439")

