# coding:utf-8
import json
import math

import requests
import scrapy
from scrapy import Field
from scrapy.loader import ItemLoader

from gkfstj.items import SchoolInfoItem, SpecialPlayItem, ProvinceScoreItem, SpecialScoreItem
import xpinyin


class GkcxSpider(scrapy.Spider):
    """Crawl school profiles, enrollment plans and admission scores from eol.cn.

    Flow: hotlists (count) -> hotlists pages (school ids) -> per-school static
    dictionaries -> paginated API queries -> one item per record.
    """

    page_size = 20
    name = 'gkcx'
    allowed_domains = ['eol.cn']

    # URL templates shared by the probe and pagination callbacks below.
    HOTLISTS_URL = ('https://api.eol.cn/gkcx/api/?page={}&request_type=1&size={}'
                    '&sort=view_total&uri=apigkcx/api/school/hotlists')
    SPECIAL_SCORE_URL = ('https://api.eol.cn/gkcx/api/?local_province_id={}'
                         '&local_type_id={}&page={}&school_id={}&size={}'
                         '&uri=apidata/api/gk/score/special&year={}')
    PROVINCE_SCORE_URL = ('https://api.eol.cn/gkcx/api/?local_province_id={}'
                          '&local_type_id={}&page={}&school_id={}&size={}'
                          '&uri=apidata/api/gk/score/province&year={}')
    SPECIAL_PLAN_URL = ('https://api.eol.cn/gkcx/api/?local_batch_id={}'
                        '&local_province_id={}&local_type_id={}&page={}'
                        '&school_id={}&size={}'
                        '&uri=apidata/api/gk/plan/special&year={}')
    STATIC_BASE = 'https://static-data.eol.cn/www/school/{}'

    def start_requests(self):
        """Entry point: ask for a single record just to learn the total count."""
        yield scrapy.Request(url=self.HOTLISTS_URL.format(1, 1), callback=self.parse)

    def parse(self, response):
        """Fan out one hotlists request per page of schools."""
        res = json.loads(response.text)
        if res.get('code') != '0000':
            return
        num = res.get('data').get('numFound')
        # Pages are 1-based, hence the +1 on the range upper bound.
        for page in range(1, math.ceil(num / self.page_size) + 1):
            yield scrapy.Request(url=self.HOTLISTS_URL.format(page, self.page_size),
                                 callback=self.parse_school_ids)

    def parse_school_ids(self, response):
        """For every school on this hotlists page, request its static data files."""
        res = json.loads(response.text)
        if res.get('code') != '0000':
            return
        for entry in res.get('data').get('item'):
            school_id = entry.get('school_id')
            # School profile.
            yield scrapy.Request(
                url=(self.STATIC_BASE + '/info.json').format(school_id),
                callback=self.parse_school_info, meta={'school_id': school_id})
            # Enrollment-plan dictionary (years / provinces / types / batches).
            yield scrapy.Request(
                url=(self.STATIC_BASE + '/dic/specialplan.json').format(school_id),
                callback=self.parse_special_plan_total, meta={'school_id': school_id})
            # Per-province admission score dictionary.
            yield scrapy.Request(
                url=(self.STATIC_BASE + '/dic/provincescore.json').format(school_id),
                callback=self.parse_province_score_total, meta={'school_id': school_id})
            # Per-major admission score dictionary.
            yield scrapy.Request(
                url=(self.STATIC_BASE + '/dic/specialscore.json').format(school_id),
                callback=self.parse_special_score_total, meta={'school_id': school_id})

    @staticmethod
    def _iter_dic_combinations(res):
        """Yield (year, pid, p_type, batch) tuples from a */dic/*.json payload."""
        for data in res.get('data'):
            year = data.get('year')
            for province in data.get('province'):
                pid = province.get('pid')
                for p_type in province.get('type'):
                    for batch in province.get('batch'):
                        yield year, pid, p_type, batch

    @staticmethod
    def _context_meta(school_id, year, batch, pid, p_type):
        """Build the request meta carried through the pagination chain."""
        return {'school_id': school_id, 'year': year,
                'batch': batch, 'pid': pid, 'p_type': p_type}

    @staticmethod
    def _tag_record(data, meta):
        """Copy the request context (school/year/batch/province/type) onto a record."""
        data['school_id'] = meta['school_id']
        data['year'] = meta['year']
        data['batch_id'] = meta['batch']
        data['p_id'] = meta['pid']
        data['p_type'] = meta['p_type']
        return data

    @staticmethod
    def _make_item(item_cls, field_name, data):
        """Wrap *data* in an item, declaring the field dynamically."""
        item = item_cls()
        item.fields[field_name] = Field()
        item[field_name] = data
        return item

    def _result_pages(self, info):
        """Return the 1-based page numbers needed to cover a probe response."""
        if not isinstance(info, dict):
            return range(0)
        # Guard against a missing 'data' payload; 0 records means no pages.
        num = (info.get('data') or {}).get('numFound', 0)
        return range(1, math.ceil(num / self.page_size) + 1)

    def parse_special_score_total(self, response):
        """Probe the per-major score API (size=1) for each dictionary combination."""
        res = json.loads(response.text)
        school_id = response.meta['school_id']
        for year, pid, p_type, batch in self._iter_dic_combinations(res):
            url = self.SPECIAL_SCORE_URL.format(pid, p_type, 1, school_id, 1, year)
            yield scrapy.Request(
                url=url, callback=self.fetch_special_score_total,
                meta=self._context_meta(school_id, year, batch, pid, p_type))

    def fetch_special_score_total(self, response):
        """Paginate the per-major score API for one (year, province, type)."""
        info = json.loads(response.text)
        meta = response.meta
        for page in self._result_pages(info):
            url = self.SPECIAL_SCORE_URL.format(
                meta['pid'], meta['p_type'], page, meta['school_id'],
                self.page_size, meta['year'])
            yield scrapy.Request(
                url=url, callback=self.fetch_special_score,
                meta=self._context_meta(meta['school_id'], meta['year'],
                                        meta['batch'], meta['pid'], meta['p_type']))

    def fetch_special_score(self, response):
        """Emit one SpecialScoreItem per per-major score record."""
        info = json.loads(response.text)
        if info.get('code') != '0000':
            return
        for data in info.get('data').get('item'):
            yield self._make_item(SpecialScoreItem, 'special_score',
                                  self._tag_record(data, response.meta))

    def parse_province_score_total(self, response):
        """Probe the per-province score API (size=1) for each combination."""
        res = json.loads(response.text)
        school_id = response.meta['school_id']
        for year, pid, p_type, batch in self._iter_dic_combinations(res):
            url = self.PROVINCE_SCORE_URL.format(pid, p_type, 1, school_id, 1, year)
            yield scrapy.Request(
                url=url, callback=self.fetch_province_score_total,
                meta=self._context_meta(school_id, year, batch, pid, p_type))

    def fetch_province_score_total(self, response):
        """Paginate the per-province score API for one (year, province, type)."""
        info = json.loads(response.text)
        meta = response.meta
        for page in self._result_pages(info):
            url = self.PROVINCE_SCORE_URL.format(
                meta['pid'], meta['p_type'], page, meta['school_id'],
                self.page_size, meta['year'])
            yield scrapy.Request(
                url=url, callback=self.fetch_province_score,
                meta=self._context_meta(meta['school_id'], meta['year'],
                                        meta['batch'], meta['pid'], meta['p_type']))

    def fetch_province_score(self, response):
        """Emit one ProvinceScoreItem per per-province score record."""
        info = json.loads(response.text)
        if info.get('code') != '0000':
            return
        for data in info.get('data').get('item'):
            yield self._make_item(ProvinceScoreItem, 'province_score',
                                  self._tag_record(data, response.meta))

    def parse_special_plan_total(self, response):
        """Probe the enrollment-plan API (size=1) for each combination."""
        res = json.loads(response.text)
        school_id = response.meta['school_id']
        for year, pid, p_type, batch in self._iter_dic_combinations(res):
            url = self.SPECIAL_PLAN_URL.format(batch, pid, p_type, 1, school_id, 1, year)
            yield scrapy.Request(
                url=url, callback=self.fetch_special_plan_total,
                meta=self._context_meta(school_id, year, batch, pid, p_type))

    def fetch_special_plan_total(self, response):
        """Paginate the enrollment-plan API for one (year, province, type, batch).

        BUGFIX: the upper bound previously omitted the trailing ``+ 1`` that the
        score paginators use, so the last page of results was never requested.
        """
        info = json.loads(response.text)
        meta = response.meta
        for page in self._result_pages(info):
            url = self.SPECIAL_PLAN_URL.format(
                meta['batch'], meta['pid'], meta['p_type'], page,
                meta['school_id'], self.page_size, meta['year'])
            yield scrapy.Request(
                url=url, callback=self.fetch_special_plan,
                meta=self._context_meta(meta['school_id'], meta['year'],
                                        meta['batch'], meta['pid'], meta['p_type']))

    def fetch_special_plan(self, response):
        """Emit one SpecialPlayItem per enrollment-plan record."""
        info = json.loads(response.text)
        if info.get('code') != '0000':
            return
        for data in info.get('data').get('item'):
            yield self._make_item(SpecialPlayItem, 'special_plan',
                                  self._tag_record(data, response.meta))

    def fetch_content_info(self, school_id, info_id):
        """Fetch one detail JSON page synchronously.

        Returns the parsed payload, or the string ``'error'`` on failure
        (callers test ``isinstance(x, str)`` for this sentinel).

        NOTE(review): this blocking ``requests`` call stalls Scrapy's reactor;
        consider converting to scheduled Requests if throughput matters.
        """
        url = (self.STATIC_BASE + '/detail/{}.json').format(school_id, info_id)
        try:
            # ValueError covers json.JSONDecodeError; keep the legacy sentinel.
            return json.loads(requests.get(url=url).text)
        except (requests.RequestException, ValueError):
            return 'error'

    def convert_to_pinyin(self, hanzi):
        """Return the lowercase pinyin initials of *hanzi* (used as item keys)."""
        return xpinyin.Pinyin().get_initials(hanzi, '').lower()

    def parse_school_info(self, response):
        """Build a SchoolInfoItem from info.json plus several detail pages."""
        res = json.loads(response.text)
        item = SchoolInfoItem()
        if isinstance(res, dict):
            for key in res:
                item.fields[key] = Field()
                item[key] = res.get(key)
        school_id = response.meta['school_id']
        # 68015 is the laboratory section; 69000-69010 are further detail pages.
        for info_id in ['68015'] + list(range(69000, 69010 + 1)):
            info_json = self.fetch_content_info(school_id=school_id, info_id=info_id)
            if isinstance(info_json, str):  # 'error' sentinel from fetch_content_info
                continue
            info_key = self.convert_to_pinyin(hanzi=info_json.get('type_name'))
            item.fields[info_key] = Field()
            item[info_key] = info_json.get('content')
        # info.json is exactly the response being parsed here; reuse the parsed
        # payload instead of re-downloading it with a blocking requests.get.
        item.fields['special_text'] = Field()
        item['special_text'] = res
        yield item
