# -*- coding: utf-8 -*-
import re

import scrapy
from scrapy import Request

from gamingdata import settings
from gamingdata.items import GameItem, PlayerGameItem


class GamingSpider(scrapy.Spider):
    """Crawl www.stat-nba.com month-by-month game listings and yield:

    * one ``GameItem`` per game (team-level stats), and
    * one ``PlayerGameItem`` per player box-score row.
    """

    name = 'Gaming'
    # BUG FIX: the original value carried a trailing slash
    # ('www.stat-nba.com/'); allowed_domains entries must be bare domain
    # names, otherwise the offsite middleware cannot match them and the
    # crawl's requests get filtered/warned about.
    allowed_domains = ['www.stat-nba.com']
    start_urls = ['http://www.stat-nba.com/gameList_simple-1985-01.html']

    start_page = 'http://www.stat-nba.com/gameList_simple-1985-01.html'

    # URL patterns, compiled once at class level.  The '.' before 'html'
    # is escaped -- the originals used a bare '.', which matches any char.
    GAME_ID_RE = re.compile(r'/game/(.*?)\.html')
    TEAM_RE = re.compile(r'/team/(.*?)\.html')

    # PlayerGameItem field names, in the order values appear in a row
    # (four prepended values, then the table's <td> texts).
    _PLAYER_FIELDS = (
        'game_id',      # game id
        'team',         # team id
        'player_id',    # player id
        'player_name',  # player name
        'gs',           # started the game: 1-yes, 0-no
        'mp',           # minutes played
        'fgper', 'fg', 'fga',              # field goals: pct / made / attempted
        'threepper', 'threep', 'threepa',  # three-pointers
        'ftper', 'ft', 'fta',              # free throws
        'ts',           # true shooting
        'trb', 'orb', 'drb',               # rebounds: total / offensive / defensive
        'ast', 'stl', 'blk', 'tov', 'pf', 'pts',
    )

    def start_requests(self):
        """Seed the crawl from the fixed first listing page."""
        yield Request(self.start_page, callback=self.parse_year)

    def parse_year(self, response):
        """Follow the link for every available year/season."""
        year_urls = response.css('.chooser::attr(href)').extract()
        current = response.css('.chooserin::attr(href)').extract_first()
        if current:  # extract_first() may return None -- don't urljoin(None)
            year_urls.append(current)
        for href in year_urls:
            yield Request(response.urljoin(href),
                          callback=self.parse_month, dont_filter=True)

    def parse_month(self, response):
        """Follow the link for every month within a year page."""
        month_urls = response.css('.chooserl::attr(href)').extract()
        current = response.css('.chooserlin::attr(href)').extract_first()
        if current:
            month_urls.append(current)
        for href in month_urls:
            yield Request(response.urljoin(href),
                          callback=self.get_gaming_urls, dont_filter=True)

    def get_gaming_urls(self, response):
        """Request every game of the month, once per parse channel.

        BUG FIX: the original followed only ``game_urls[0]`` (an apparent
        debugging leftover, matching the commented-out single-URL requests
        in parse_year/parse_month), silently dropping every other game of
        the month.
        """
        for href in response.css('.cheight a::attr(href)').extract():
            game_page = response.urljoin(href)
            for channel in ('game', 'player'):
                # Dispatch to parse_game / parse_player; fall back to the
                # default parse() if the named method is missing.
                yield Request(game_page,
                              callback=getattr(self, 'parse_' + channel, self.parse),
                              dont_filter=True)

    def parse_game(self, response):
        """Extract the team-level stats of one game into a ``GameItem``.

        Returns ``None`` (emits nothing) when the page does not look like a
        game page -- the original crashed with NameError when ``season`` or
        ``away_team`` stayed unbound, and could even assign the *builtin*
        ``type`` to ``item['type']`` because the local shadowed it.
        """
        match = self.GAME_ID_RE.search(response.url)
        if match is None:
            return None  # not a /game/... URL
        game_id = match.group(1)

        # Season header, e.g. "1984-1985赛季\n常规赛".
        season = ''
        game_type = 0  # 0-unknown, 1-regular season, 2-playoffs, 3-finals
        title = response.css('.title::text').extract_first() or ''
        season_result = re.findall('(.*?)赛季\n(.*?)$', title, re.S)
        if season_result:
            season, stage = season_result[0]
            if stage == '常规赛':
                game_type = 1
            elif stage == '季后赛':
                game_type = 2
            else:
                game_type = 3

        # Game date.
        game_date = response.css(
            '#background > div:nth-child(7) > div:nth-child(2)::text').extract_first()

        # Direct <a> children of .teamDiv carry the away/home team codes.
        team_links = response.css('.teamDiv >a::attr(href)').extract()
        if len(team_links) < 2:
            return None  # layout not recognised -- nothing usable to emit
        away_team = self.TEAM_RE.findall(team_links[0])[0]
        home_team = self.TEAM_RE.findall(team_links[1])[0]

        away_score = home_score = None
        scores = response.css('.scorebox .text .score::text').extract()
        if len(scores) >= 2:
            away_score, home_score = scores[0], scores[1]

        # Per-quarter scores, keyed "1", "2", ... by position.
        away_quarters = response.css('.scorebox > div:nth-child(2) .number::text').extract()
        home_quarters = response.css('.scorebox > div:nth-child(3) .number::text').extract()
        labels = [str(i) for i in range(1, len(away_quarters) + 1)]

        # Coach id and name for each side.
        coach_sel = '#stat_box%s > div:nth-child(2) > div > a'
        away_coach_num = response.css((coach_sel % away_team) + '::attr(href)').re_first(
            r'/coach/(.*?)\.html')
        away_coach_name = response.css((coach_sel % away_team) + '::text').extract_first()
        home_coach_num = response.css((coach_sel % home_team) + '::attr(href)').re_first(
            r'/coach/(.*?)\.html')
        home_coach_name = response.css((coach_sel % home_team) + '::text').extract_first()

        # Whole-team stat line, labelled with the names from settings.
        stat_sel = ' .team_all_content>td:not(.normal)::text'
        away_team_stat = response.css('#stat_box' + away_team + stat_sel).extract()
        home_team_stat = response.css('#stat_box' + home_team + stat_sel).extract()

        item = GameItem()
        item['game_id'] = game_id
        item['season'] = season.strip('\n')
        item['type'] = game_type  # renamed local: no longer shadows builtin type()
        item['game_date'] = game_date
        item['away_team'] = away_team
        item['home_team'] = home_team
        item['away_score'] = away_score
        item['home_score'] = home_score
        item['away_coach_num'] = away_coach_num
        item['away_coach_name'] = away_coach_name
        item['home_coach_num'] = home_coach_num
        item['home_coach_name'] = home_coach_name
        item['away_quarter_score'] = dict(zip(labels, away_quarters))
        item['home_quarter_score'] = dict(zip(labels, home_quarters))
        item['away_team_stat'] = dict(zip(settings.TEAM_STAT, away_team_stat))
        item['home_team_stat'] = dict(zip(settings.TEAM_STAT, home_team_stat))
        return item

    def _player_rows(self, response, game_id, team):
        """Yield one raw stat row per player of *team*: four prepended
        values (game id, team, player id, player name) followed by the
        stat table's td texts."""
        for row_sel in response.css('#stat_box' + team + ' .sort'):
            row = row_sel.css('td::text').extract()
            row.insert(0, game_id)
            row.insert(1, team)
            row.insert(2, row_sel.re_first(r'/player/(.*?)\.html'))
            row.insert(3, row_sel.css('td a::text').extract_first())
            yield row

    def parse_player(self, response):
        """Extract one ``PlayerGameItem`` per player row of a game page.

        The original duplicated the away/home extraction loop verbatim and
        assigned 21 fields by hand-numbered index; both loops now share
        ``_player_rows`` and the ``_PLAYER_FIELDS`` name table.
        """
        match = self.GAME_ID_RE.search(response.url)
        if match is None:
            return
        game_id = match.group(1)

        team_links = response.css('.teamDiv >a::attr(href)').extract()
        if len(team_links) < 2:
            return
        away_team = self.TEAM_RE.findall(team_links[0])[0]
        home_team = self.TEAM_RE.findall(team_links[1])[0]

        for team in (away_team, home_team):
            for row in self._player_rows(response, game_id, team):
                if len(row) < len(self._PLAYER_FIELDS):
                    # Malformed/short row (e.g. a "did not play" line): the
                    # original raised IndexError here and lost the whole
                    # page's remaining rows -- skip just the bad row.
                    self.logger.warning('short player row on %s', response.url)
                    continue
                player_item = PlayerGameItem()
                for field, value in zip(self._PLAYER_FIELDS, row):
                    player_item[field] = value
                yield player_item
