import requests
from bs4 import BeautifulSoup
import re
def get_url(url):
    """Fetch *url*, collect every ``<a>`` tag's ``data-thumbnail`` attribute,
    and append each value (one per line) to ``url.txt``.

    Parameters
    ----------
    url : str
        The page to scrape.

    Side effects
    ------------
    Appends to ``url.txt`` in the current working directory.
    """
    headers = {
        # Session cookie captured from a logged-in browser session —
        # presumably required for the target site; will expire eventually.
        'Cookie': 'ARRAffinity=5337f9faa36f86559a728fdbcc126a6ba2cd'
                  '3716ba5bc4131378e2b44b836b4a; ARRAffinitySameSite'
                  '=5337f9faa36f86559a728fdbcc126a6ba2cd3716ba5bc41313'
                  '78e2b44b836b4a; .AspNetCore.Antiforgery.cdV5uW_Ejgc='
                  'CfDJ8ExtWuUNqIZIpK46MLABsgXNj1v3A72OM87OEjolrx2FiagI'
                  'jGHShslRkvVuL-ybaTSve3N5FFrCH2DzQ1QnRjC72Clo-pyHToLU3a'
                  'VUiROKnE-FVDsND9O2T8NWv8TH9hA1fKgFk_A535TfmlfjhnE',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36',
    }

    # NOTE(review): the original read a proxy spec from
    # '../bilibili播放量/two.txt' into a *string* that was never passed to
    # requests (and requests' proxies= expects a dict, not a string), so
    # that dead code was removed. If a proxy is needed, parse the file into
    # a dict and pass proxies=... to requests.get below.

    # NOTE(review): verify=False disables TLS certificate validation —
    # kept to preserve original behavior, but consider removing it.
    response = requests.get(url, headers=headers, verify=False, timeout=30)
    soup = BeautifulSoup(response.content, 'html.parser')

    # Open the output file once instead of re-opening it for every link.
    with open('url.txt', 'a', encoding='utf-8') as out:
        for anchor in soup.find_all('a'):
            thumbnail = anchor.get('data-thumbnail')
            if thumbnail is not None:
                out.write(thumbnail + '\n')


# with open('networkhtml.html','w',encoding='utf-8')as f:
#     f.write(str(sop))

