from scrapy import Request
import json
import datetime
import scrapy
from scrapyspider.items import Contest


class SaikrSpider(scrapy.Spider):
    """Crawl contest listings from saikr.com and yield ``Contest`` items.

    Starts from the category listing pages (categories 2-6), extracts one
    item per listing row, and follows the "next" pagination link until
    exhausted.
    """

    name = 'contest_saikr'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36',
    }

    def start_requests(self):
        """Yield the initial listing-page requests for categories 2-6."""
        for category in range(2, 7):
            url = 'https://www.saikr.com/vs/0/%d/0/' % category
            yield Request(url, headers=self.headers)

    def parse(self, response):
        """Parse one listing page.

        Yields a ``Contest`` per row that has an official-apply link, then
        a ``Request`` for the next page when a pagination link exists.
        """
        for li in response.xpath('//li[@class="item clearfix"]'):
            # Query the row's <p> texts once instead of re-running the same
            # XPath for every field.  Observed order on the page:
            # [organizer, level, signup period, contest period].
            texts = li.xpath('div//p/text()').extract()
            if len(texts) < 4:
                # Row layout changed or is incomplete; skip instead of
                # raising IndexError and killing the whole page.
                self.logger.warning('Skipping malformed row on %s', response.url)
                continue

            # Periods look like "start～end".  partition() never raises when
            # the separator is absent (the end part is then '').
            signup_start, _, signup_end = texts[2].partition('～')
            contest_start, _, contest_end = texts[3].partition('～')

            contest = Contest()
            contest['contest_name'] = li.xpath(
                'div/h3[@class="tit"]/a/text()').extract()[0].strip()
            contest['signup_start_time'] = signup_start
            contest['signup_end_time'] = signup_end
            contest['contest_start_time'] = contest_start
            contest['contest_end_time'] = contest_end
            contest['sponsor_id'] = 2
            contest['hot_degree'] = 0

            urls = li.xpath(
                'div//a[contains(@class,"offical-apply")]/@href').extract()
            if urls:
                # Only yield items that carry a usable contest URL, and store
                # it as a string (the old code left a list in the field).
                contest['contest_url'] = urls[0].strip()
                yield contest
            else:
                # Replaces the leftover debug print() with proper logging.
                self.logger.debug('No official-apply link for %r',
                                  contest['contest_name'])

        next_url = response.xpath('//li[@class="next"]/a/@href').extract()
        if next_url:
            yield Request('https://www.saikr.com' + next_url[0].strip(),
                          headers=self.headers)







