import requests


class Spider:
    """Fetch NBA team/player stats from a JSON API and flatten the records."""

    def __init__(self, url):
        # url: full API endpoint to crawl.
        self.url = url
        # Browser-like User-Agent so the server does not reject the request.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36'
        }

    def crawl(self):
        """Fetch self.url and return the decoded JSON body as a dict.

        Returns an empty dict when the server responds with a non-200
        status or the body is not valid JSON.
        """
        # BUG FIX: the second positional argument of requests.get is
        # `params`, not `headers` — the original call sent the headers
        # dict as query-string parameters, so the User-Agent header was
        # never actually sent.  A timeout is also added so a stalled
        # server cannot hang the crawler forever.
        response = requests.get(self.url, headers=self.headers, timeout=10)
        if response.status_code != 200:
            print('服务器响应异常')
            return {}
        try:
            return response.json()
        except ValueError:
            # Non-JSON body: treat it the same as a bad response.
            print('服务器响应异常')
            return {}

    def get_team_name(self, team):
        """Return the team's display name, e.g. 'Los Angeles Lakers'."""
        city = team.get('city')
        name = team.get('name')
        return f'{city} {name}'

    def get_player_name(self, player):
        """Return the player's display name from a player record."""
        return player.get('displayName')

    def get_player_id(self, player):
        """Return the player's numeric id from a player record."""
        return player.get('playerId')

    def parse(self, data):
        """Flatten the API payload into a list of team/player records.

        Each returned record is the original entry from
        data['data']['player']['Data'] augmented with 'team', 'player'
        and 'player_id' keys.  Returns None (after logging) when the
        payload's status code is non-zero.
        """
        if data.get('code') != 0:
            print('解析异常')
            return None
        # NOTE(review): the original also read data['data']['season']
        # but never used it, so that read is dropped here.
        records = data['data'].get('player', {}).get('Data', [])
        for record in records:
            team_obj = record['Team']
            player_obj = record['Player']
            record['team'] = self.get_team_name(team_obj)
            record['player'] = self.get_player_name(player_obj)
            record['player_id'] = self.get_player_id(player_obj)
        return records

    def run(self):
        """Crawl the URL and return the parsed records (None on failure)."""
        return self.parse(self.crawl())


if __name__ == '__main__':
    # url = "https://api.nba.cn/sib/v2/league/teamstats?app_key=tiKB2tNdncnZFPOi&os_type=3&os_version=10.0.0&device_id=1b53d11f88f3918abc359c1b8292ac54&app_version=1.1.0&network=wifi&install_id=202403291&channel=nba-server&t=1712575064&sign=4ddc95cb33c2ace359ea44579e50913b&season=2023&seasonType=2&conference=All&division=All&statType=Pointspg"
    # spider = Spider(url)
    # spider.run()
    import json
    