import requests
import json
from bs4 import BeautifulSoup


def scrape_nba_rankings():
    """Scrape NBA Eastern/Western conference standings from Sina Sports.

    Fetches the rankings page, parses the two conference tables, and writes
    the result to ``nba_rankings.json`` in the current directory (keys are
    the Chinese labels 东部排名/西部排名). Prints a status message on success
    or failure. Returns None.
    """
    # NOTE(review): the '#type=conference' fragment is client-side only and is
    # not sent to the server; if the standings tables are rendered by
    # JavaScript, this static fetch may contain no 'eastRank'/'westRank'
    # divs — verify against the live page.
    url = 'https://slamdunk.sports.sina.com.cn/rank#type=conference'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }

    try:
        # timeout= prevents the script from hanging forever on a stalled
        # connection (requests has no default timeout).
        response = requests.get(url, headers=headers, timeout=10)
        response.encoding = 'utf-8'

        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')

            # Both conferences share the same table layout; parse each with
            # the shared helper (returns [] when the div is missing).
            teams_data = {
                '东部排名': _parse_conference_table(soup.find('div', {'id': 'eastRank'})),
                '西部排名': _parse_conference_table(soup.find('div', {'id': 'westRank'})),
            }

            with open('nba_rankings.json', 'w', encoding='utf-8') as f:
                json.dump(teams_data, f, ensure_ascii=False, indent=2)
            print("数据已保存为 nba_rankings.json")
        else:
            print(f"请求失败，状态码: {response.status_code}")

    except Exception as e:
        # Broad catch kept deliberately: this is a best-effort CLI scraper and
        # should report the error rather than crash with a traceback.
        print(f"爬取数据出错: {e}")


def _parse_conference_table(table):
    """Extract per-team standing rows from one conference's table element.

    Args:
        table: BeautifulSoup element containing the conference's rows
            (``<tr>``/``<td>`` structure), or None when not found.

    Returns:
        list[dict]: one dict per team with keys 排名/球队/胜场/负场/胜率,
        all values as stripped strings. Empty list when ``table`` is None.
    """
    teams = []
    if table is None:
        return teams
    for row in table.find_all('tr')[1:]:  # skip the header row
        cols = row.find_all('td')
        # The original guard was len(cols) > 1 but cols[4] is accessed below;
        # require all 5 cells so a short/malformed row is skipped instead of
        # raising IndexError (which the caller's except would swallow,
        # aborting the whole scrape).
        if len(cols) >= 5:
            teams.append({
                '排名': cols[0].text.strip(),
                '球队': cols[1].text.strip(),
                '胜场': cols[2].text.strip(),
                '负场': cols[3].text.strip(),
                '胜率': cols[4].text.strip(),
            })
    return teams


# Script entry point: only run the scraper when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    scrape_nba_rankings()