import requests
import json
from bs4 import BeautifulSoup
import csv

# Scrape the top-50 NBA player scoring ranks from Sohu Sports and save the
# table to nba_players.json and nba_players.csv (UTF-8, Chinese headers).
url = "http://data.sports.sohu.com/nba/nba_players_rank.php?order_by=points&spm=smpc.fb-nba-home.top-dc.2.1620824904040nf6byr7"
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"}

# Single source of truth for column names, shared by the row dicts and the
# CSV header so the two outputs can never drift apart.
FIELDNAMES = ["排名", "球员", "球队", "得分", "篮板", "助攻", "抢断", "盖帽"]

# timeout prevents an unresponsive server from hanging the script forever;
# raise_for_status surfaces HTTP errors instead of parsing an error page.
response = requests.get(url, headers=headers, timeout=10)
response.raise_for_status()
# NOTE(review): Sohu pages are often GBK-encoded without a charset header —
# trust requests' content sniffing over its Latin-1 default; verify output.
response.encoding = response.apparent_encoding

soup = BeautifulSoup(response.text, "html.parser")
table = soup.find("table", attrs={"class": "table01"})
if table is None:
    # Fail loudly with context instead of an opaque AttributeError below.
    raise RuntimeError("ranking table (class 'table01') not found — page layout may have changed")

data = []
# Skip the header row, keep at most the top 50 data rows.
for row in table.find_all("tr")[1:51]:
    cells = [td.text.strip() for td in row.find_all("td")]
    if len(cells) < len(FIELDNAMES):
        continue  # spacer/malformed row — indexing it would raise IndexError
    data.append(dict(zip(FIELDNAMES, cells)))

# Write JSON (keep Chinese characters readable, not \u-escaped).
with open("nba_players.json", "w", encoding="utf-8") as f:
    json.dump(data, f, ensure_ascii=False)

# Write CSV with the same column order as the JSON records.
with open("nba_players.csv", "w", encoding="utf-8", newline="") as f:
    writer = csv.DictWriter(f, fieldnames=FIELDNAMES)
    writer.writeheader()
    writer.writerows(data)
