#!/usr/bin/python
# -*- coding: UTF-8 -*-
import scrapy
import json
import re
from seasonSpider.dbHelper import dbHelper

class BilibiliSpider(scrapy.Spider):
	"""Spider for Bilibili's weekly bangumi (anime) timeline API.

	Fetches the global timeline JSON and persists every season entry
	through dbHelper.updata_season.
	"""

	name = "bilibili"
	# Must match the domain of start_urls; the previous value
	# "bilibili.org" would cause OffsiteMiddleware to drop any
	# follow-up requests to *.bilibili.com.
	allowed_domains = ["bilibili.com"]
	start_urls = [
		"https://bangumi.bilibili.com/web_api/timeline_global"
	]

	def parse(self, response):
		"""Persist each season found in the timeline JSON.

		Also dumps the raw response body to ``bilibili.log`` so the
		payload can be inspected offline.
		"""
		with open("bilibili.log", 'wb') as f:
			f.write(response.body)

		data = json.loads(response.body)
		for day_item in data["result"]:
			week = day_item["day_of_week"]
			for season in day_item["seasons"]:
				delay = season["delay"]
				# A delayed season publishes its delay_index instead
				# of the regular pub_index.
				if delay == 0:
					pub_index = season["pub_index"]
				else:
					pub_index = season["delay_index"]
				# NOTE(review): pl appears to be a platform flag
				# (1 == bilibili?) — confirm against dbHelper.
				pl = 1
				dbHelper.updata_season(
					season["title"], pub_index, week, delay,
					season["season_status"], season["cover"],
					season["square_cover"], pl)