import requests
from bs4 import BeautifulSoup
import json
import csv

# Sohu NBA player ranking page, ordered by points per game.
url = 'http://data.sports.sohu.com/nba/nba_players_rank.php?order_by=points&spm=smpc.fb-nba-home.top-dc.2.1620824904040nf6byr7'
# Browser-like User-Agent: some servers reject the default requests UA.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'
}
# timeout prevents the script from hanging indefinitely on a stalled server;
# raise_for_status surfaces HTTP errors (404/500/...) immediately instead of
# letting the parser fail later on an error page.
response = requests.get(url, headers=headers, timeout=10)
response.raise_for_status()
soup = BeautifulSoup(response.content, 'html.parser')

# Extract the top-50 rows of the ranking table into a list of dicts.
result = []
table = soup.find('table', class_='table01')
if table is None:
    # Fail loudly with a clear message instead of an AttributeError below.
    raise RuntimeError('ranking table (class "table01") not found in page')

# Column order as rendered by the page: rank, player, team, points,
# rebounds, assists.  NOTE(review): assumed from the original index mapping —
# confirm against the live page layout.
FIELDS = ('rank', 'player', 'team', 'points', 'rebounds', 'assists')

for row in table.find_all('tr')[:51]:
    cols = row.find_all('td')
    if len(cols) < len(FIELDS):
        # Header rows use <th> (so cols is empty) and malformed rows lack
        # cells; the original code crashed with IndexError on these.
        continue
    result.append({key: cols[i].text.strip() for i, key in enumerate(FIELDS)})
# Write the scraped data to a JSON file (ensure_ascii=False keeps the
# Chinese player/team names human-readable in the output).
with open('players.json', 'w', encoding='utf-8') as f:
    json.dump(result, f, ensure_ascii=False)

# Write the same data to a CSV file with a Chinese header row.
with open('players.csv', 'w', encoding='utf-8', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(['排名', '球员', '球队', '得分', '篮板', '助攻'])
    writer.writerows(
        [row['rank'], row['player'], row['team'],
         row['points'], row['rebounds'], row['assists']]
        for row in result
    )
print("保存成功！")
