import requests
import pandas as pd
from bs4 import BeautifulSoup
import json


def get_team_data(url):
    """Scrape NBA conference standings from *url* and return them as a list of dicts.

    Fetches the page, locates the Eastern and Western conference tables
    (identified by their ``data-conference`` attribute), and extracts one
    record per team row.

    Args:
        url: Standings page URL.

    Returns:
        A list of dicts (one per team, Chinese column keys), possibly empty
        if no tables were found, or ``None`` when the HTTP request fails.
    """
    # NOTE(review): a URL fragment like '#type=conference' is client-side only
    # and is not sent to the server; if the tables are rendered by JavaScript,
    # the raw HTML may not contain them — verify against the live page.
    try:
        response = requests.get(url, timeout=10)
    except requests.RequestException as e:
        print(f"请求失败: {e}")
        return None
    if response.status_code != 200:
        print(f"请求失败，状态码: {response.status_code}")
        return None
    soup = BeautifulSoup(response.text, 'html.parser')
    east_table = soup.find('table', {'data-conference': 'east'})
    west_table = soup.find('table', {'data-conference': 'west'})
    all_data = []
    for table in (east_table, west_table):
        if table is None:
            # Table missing (layout change or JS-rendered page): skip instead
            # of crashing with AttributeError on table.find_all.
            continue
        # First <tr> is the header row.
        for row in table.find_all('tr')[1:]:
            cols = row.find_all('td')
            if len(cols) < 15:
                # Separator or malformed row — not a team entry.
                continue
            try:
                data = {
                    '排名': cols[0].text.strip(),
                    '球队': cols[1].text.strip(),
                    '胜': int(cols[2].text.strip()),
                    '负': int(cols[3].text.strip()),
                    '胜率': cols[4].text.strip(),
                    '胜差': cols[5].text.strip(),
                    '得分': float(cols[6].text.strip()),
                    '失分': float(cols[7].text.strip()),
                    '分差': float(cols[8].text.strip()),
                    '主场战绩': cols[9].text.strip(),
                    '客场战绩': cols[10].text.strip(),
                    '分部战绩': cols[11].text.strip(),
                    '分区战绩': cols[12].text.strip(),
                    '最近10场': cols[13].text.strip(),
                    '连胜连负': cols[14].text.strip()
                }
            except ValueError:
                # Non-numeric placeholder (e.g. '-') in a numeric column.
                continue
            all_data.append(data)
    return all_data


def save_to_json(data, file_path):
    """Serialize *data* to *file_path* as pretty-printed UTF-8 JSON.

    Non-ASCII characters (the Chinese keys/values) are written verbatim
    rather than as \\uXXXX escapes.
    """
    serialized = json.dumps(data, ensure_ascii=False, indent=4)
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(serialized)


def save_to_csv(data, file_path):
    """Write *data* (a list of per-team dicts) to *file_path* as CSV.

    Uses 'utf-8-sig' so the BOM makes Excel detect the encoding correctly
    for the Chinese headers; the DataFrame index is omitted.
    """
    table = pd.DataFrame(data)
    table.to_csv(file_path, index=False, encoding='utf-8-sig')


if __name__ == "__main__":
    # Entry point: scrape the standings page and persist both formats.
    # NOTE(review): the '#type=conference' fragment is client-side only and
    # is not transmitted to the server — confirm the raw HTML contains the
    # standings tables.
    target_url = "https://slamdunk.sports.sina.com.cn/rank#type=conference"
    standings = get_team_data(target_url)
    if standings:
        save_to_json(standings, 'team_data.json')
        save_to_csv(standings, 'team_data.csv')
        print("数据已成功保存为JSON和CSV文件。")