import requests
from bs4 import BeautifulSoup
import openpyxl
# Scrape the Weibo realtime hot-search list and save it as an Excel workbook.
# Requires a logged-in Cookie header — s.weibo.com redirects anonymous clients.
url = 'https://s.weibo.com/top/summary?cate=realtimehot'
headers={
    # NOTE(review): session cookie hard-coded below; it will expire and must be refreshed manually.
    'Cookie':'SINAGLOBAL=2101072499546.719.1682298658739; UOR=,,www.baidu.com; SUB=_2A25IPLirDeRhGeBN6lIV8SnIzTuIHXVr3tjjrDV8PUJbkNB-LWehkW1NRIB8sUYVX0o1wczMxSwjFCa7v9alFzYn; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9WWCAc1eDmFUrl.mEYsbYMOP5NHD95Qce027Sh2NShqNWs4Dqcjx-GiadgL09CH8SFHF1FHFS..t; _s_tentry=www.baidu.com; Apache=8505708502897.99.1703223834360; ULV=1703223834377:3:1:1:8505708502897.99.1703223834360:1698220253045',
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0'
}
# timeout: never hang forever on a stalled connection.
res = requests.get(url, headers=headers, timeout=10)
# Fail loudly on HTTP 4xx/5xx instead of silently parsing an error page.
res.raise_for_status()
soup = BeautifulSoup(res.text, 'html.parser')
# The hot-search entries live inside the first element with class "data".
data = soup.select_one('.data')
if data is None:
    # Typically means the cookie expired and Weibo served a login page.
    raise RuntimeError('hot-search table (.data) not found in response; check the Cookie header')
aList = data.find_all('a')
# First row is the header; each following row is [title, absolute link].
newList = [['标题', '链接']]
for a in aList:
    newList.append([a.text, f'https://s.weibo.com{a["href"]}'])
print(newList)
# Write all rows to a fresh workbook; close even if save() raises.
workhood = openpyxl.Workbook()
try:
    sheet = workhood.active
    for row in newList:
        sheet.append(row)
    workhood.save('D:\\2.xlsx')
finally:
    workhood.close()
