import scrapy


class WeekSpider(scrapy.Spider):
    """Log in to the academic-affairs site and fetch the class-schedule page.

    Flow: ``start_requests`` issues a GET-based login; ``parse_cid`` captures
    the session cookies from the login response; a second request fetches the
    schedule page, which ``class_table`` prints.
    """

    name = 'week'
    allowed_domains = ['60.208.1.200:6066']
    url = 'http://60.208.1.200:6066/'
    login_url = 'jsxsd'
    headers = {}  # kept for backward compatibility; shadowed per-instance in __init__

    class_schedule = 'jsxsd/framework/main_index_loadkb.jsp'

    def __init__(self, *args, **kwargs):
        # Bug fix: the base initializer was never called, so Scrapy's
        # standard Spider setup (name/kwargs handling) was skipped.
        super().__init__(*args, **kwargs)
        # Bug fix: use a per-instance dict; the class-level `headers` dict
        # would be shared (and mutated) across all spider instances.
        self.headers = {}
        # SECURITY: hard-coded credentials — move to settings or env vars.
        user_account = 'J2100603'
        user_password = '123456wanG.'
        encoded = 'SjIxMDA2MDM=%%%MTIzNTU2d2FuRy4='
        # NOTE(review): values are concatenated without URL-escaping; the
        # server appears to expect the raw '%%%' separator in `encoded` —
        # confirm before switching to urllib.parse.urlencode.
        self.login_url = (
            self.url + self.login_url
            + '?userAccount=' + user_account
            + '&userPassword=' + user_password
            + '&encoded=' + encoded
        )

    def start_requests(self):
        """Kick off the crawl with the GET-based login request."""
        yield scrapy.Request(self.login_url, callback=self.parse_cid)

    def parse_cid(self, response):
        """Capture session cookies from the login response, then request
        the class-schedule page with those cookies attached."""
        url = self.url + self.class_schedule
        print("cookie获取：")
        # Set-Cookie header values are bytes and carry attributes
        # (Path, HttpOnly, ...); keep only each cookie's leading
        # name=value pair when building the outgoing Cookie header.
        raw_cookies = response.headers.getlist('Set-Cookie')
        cookie = b'; '.join(c.split(b';', 1)[0] for c in raw_cookies)
        self.headers['Cookie'] = cookie
        print(cookie)

        # Bug fix: the collected headers were never sent with this request,
        # making the cookie capture above dead code.
        yield scrapy.Request(url=url, headers=self.headers, callback=self.class_table)

    def class_table(self, response):
        """Dump the schedule page body for inspection."""
        print("课表")
        print(response.text)

    def parse(self, response):
        """Default callback required by scrapy.Spider; unused here."""
        pass
