# -*- coding:utf8 -*-

import re, sys, json, datetime, random, time
from scrapy.selector import Selector
from scrapy.exceptions import CloseSpider
try:
    from scrapy.spiders import Spider
except:
    from scrapy.spider import BaseSpider as Spider

from scrapy.http import Request, FormRequest
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc

from gaokaopai.items import *
from gaokaopai.dao import *
from gaokaopai.util import *

import demjson
import importlib

importlib.reload(sys)
# sys.setdefaultencoding("utf-8")

class BaoKaoDaXueSpider(Spider):
    """Scrape per-province student enrollment plans (zsjh) from baokaodaxue.com.

    For every pending university record returned by the local DAO, one JSON
    request is issued per (province, subject-track, year) combination; each
    response is parsed into ``StudentPlan`` items.
    """

    name  = 'baokaodaxue'
    # NOTE(review): Scrapy's domain filter attribute is `allowed_domains`;
    # `allow` has no effect on OffsiteMiddleware — confirm whether this was
    # intended before renaming (renaming would start filtering requests).
    allow = ['baokaodaxue.com']

    # The original no-op __init__ (which only forwarded to super) was removed;
    # Spider.__init__ is inherited unchanged.

    def start_requests(self):
        """Yield one enrollment-plan request per university/province/track/year.

        Side effect: marks each university record as "in progress" (status 2)
        via updateBKUniversity before its requests are generated.
        """
        for bk in getListBKUniversity():
            updateBKUniversity(bk['id'], 2)

            name = bk['name']
            cid  = bk['cid']

            for province in getListProvince():
                bkid = province['bkid']
                if bkid == '':
                    # Province has no baokaodaxue id; nothing to query.
                    continue
                for wl in (1, 2):  # subject track code; see type mapping in parse_plan
                    for year in (2017, 2016):
                        url = "http://www.baokaodaxue.com/yd/v3college/zsjh?kq=%s&cid=%s&wl=%s&year=%s" % (bkid, cid, wl, year)
                        yield Request(
                            url,
                            callback=self.parse_plan,
                            dont_filter=True,
                            meta={
                                'name': name,
                                'provinceid': province['id'],
                                'cid': cid,
                                'wl': wl,
                                'year': year,
                            },
                        )

    def parse_plan(self, response):
        """Parse a JSON enrollment-plan response into ``StudentPlan`` items.

        Expects ``response.meta`` to carry name/cid/wl/year/provinceid exactly
        as set by :meth:`start_requests`.
        """
        meta = response.meta

        name       = meta['name']
        cid        = meta['cid']
        wl         = meta['wl']
        year       = meta['year']
        provinceid = meta['provinceid']

        # Map the crawled name back to our own university record, if any.
        university = getUniversity(name)
        fid = '' if university is None else university['fid']

        data = demjson.decode(response.body)
        rows = data['extraData']
        if not rows:
            # Was print("nodata"); use the spider logger so it lands in crawl logs.
            self.logger.info("no enrollment-plan data: %s province=%s wl=%s year=%s",
                             name, provinceid, wl, year)
            return

        for item in rows:
            plan = StudentPlan()
            plan['table']      = 't_university_student_plan'
            plan['fid']        = fid
            plan['bk_name']    = name
            plan['bk_id']      = cid
            plan['provinceid'] = provinceid
            # wl and our `type` field use opposite encodings (wl 2 -> type 1,
            # otherwise type 2) — presumably arts/science tracks; TODO confirm.
            plan['type']  = 1 if wl == 2 else 2
            plan['year']  = year
            plan['major'] = item['major']
            plan['code']  = item['code']
            plan['count'] = item['jhrs']  # jhrs: planned headcount per the site's JSON — TODO confirm
            yield plan

    def get_header(self, referer):
        """Build request headers with a random User-Agent from settings.

        NOTE(review): the hard-coded Host 'land.fang.com' does not match this
        site and looks copy-pasted from another spider — confirm before using
        this helper (it is not referenced anywhere in this file).
        """
        agents = self.settings.getlist('USER_AGENTS')
        random_agent = random.choice(agents)

        return {
            'Referer': referer,
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate, sdch',
            'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4',
            'Host': 'land.fang.com',
            'Upgrade-Insecure-Requests': 1,
            'User-Agent': random_agent,
        }



