# -*- coding:utf8 -*-
import importlib
import re, sys, json, datetime, random, time
from scrapy.selector import Selector
from scrapy.exceptions import CloseSpider
try:
    from scrapy.spiders import Spider
except:
    from scrapy.spider import BaseSpider as Spider

from scrapy.http import Request, FormRequest
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc

from gaokaopai.items import *
from gaokaopai.dao import *
from gaokaopai.util import *
import datetime

importlib.reload(sys)
# sys.setdefaultencoding("utf-8")

class ChsiMiddleSchoolSpider(Spider):
    """Scrape middle-school (high school) records from gaokao.chsi.com.cn.

    Crawl flow:
      start_requests -> parse_list           national index, one link per province
                     -> parse_province_list  schools of a province; plain-text cells
                                             are inserted directly, linked cells are
                                             followed for more detail
                     -> parse_school         school category ("genre") page
                     -> parse_detail         full profile, inserted into
                                             t_middle_school via executeSql
    """

    name        = 'chsi_middle_school'
    allow       = ['chsi.com.cn']

    def __init__(self, *args, **kwargs):
        super(ChsiMiddleSchoolSpider, self).__init__(*args, **kwargs)

    @staticmethod
    def _sql_quote(value):
        """Escape single quotes so *value* can sit inside a SQL string literal.

        NOTE(review): string-built SQL remains injection-prone; switch to
        parameterized queries if executeSql supports placeholders.
        """
        return value.replace("'", "''")

    @staticmethod
    def _now():
        """Current local time formatted for the created_at/updated_at columns."""
        return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    def start_requests(self):
        """Entry point: fetch the national middle-school index page."""
        yield Request("http://gaokao.chsi.com.cn/zx/sch/home.action", callback=self.parse_list, dont_filter=True)

    def parse_list(self, response):
        """Parse the national index; yield one request per province link.

        Province names may be wrapped in <strong>, so try that first and fall
        back to the anchor's own text.
        """
        base_url = get_base_url(response)

        for a_dom in response.xpath(u"//div[@class='sub_left']/table[1]/.//td/a"):
            name = ''.join(a_dom.xpath(u".//strong/text()").extract()).strip()
            if name == '':
                name = ''.join(a_dom.xpath(u"./text()").extract()).strip()
            url = ''.join(a_dom.xpath(u"./@href").extract()).strip()

            # Project helper: resolve the province record from its display name.
            province = getProvince(name)

            yield Request(urljoin_rfc(base_url, url), callback=self.parse_province_list, dont_filter=True, meta={'province': province['id']})

    def parse_province_list(self, response):
        """Parse one province's school table.

        Cells without a link hold only a school name: insert it right away if
        not already present. Cells with a link are followed to parse_school for
        genre/detail information.
        """
        base_url = get_base_url(response)
        meta = response.meta

        now_date = self._now()

        for a_dom in response.xpath(u"//table[contains(@class, 'sub_table')]/.//td"):
            name = ''.join(a_dom.xpath(u"./a/text()").extract()).strip()
            if name == '':
                name = ''.join(a_dom.xpath(u"./text()").extract()).strip()
                if name != '':
                    middle = getMiddleSchool(meta['province'], name)

                    if middle is None:
                        # Values are escaped: school names may legally contain
                        # single quotes, which would otherwise break the SQL.
                        insert_sql = "insert into t_middle_school (name, province, created_at, updated_at) values ('%s', '%s', '%s', '%s')" \
                            % (self._sql_quote(name), meta['province'], now_date, now_date)

                        executeSql(insert_sql)
            else:
                url = ''.join(a_dom.xpath(u"./a/@href").extract()).strip()
                if url != '':
                    yield Request(urljoin_rfc(base_url, url), callback=self.parse_school, dont_filter=True, meta={'province': meta['province'], 'name': name})

    def parse_school(self, response):
        """Extract the school's genre, then follow the link to its detail page.

        Only the "省级示范高中" (provincial model high school) label is kept;
        anything else is normalized to an empty genre.
        """
        base_url = get_base_url(response)
        meta = response.meta

        genre = ''.join(response.xpath(u"//div[@class='l_cnt'][1]/text()").extract()).replace(' ', '').replace('\r\n', '')

        if u"省级示范高中" in genre:
            genre = '省级示范高中'
        else:
            genre = ''

        url = ''.join(response.xpath(u"//div[@class='c_sch']/.//a[@class='color1']/@href").extract())

        if url != '':
            yield Request(urljoin_rfc(base_url, url), callback=self.parse_detail, dont_filter=True, meta={'province': meta['province'], 'name': meta['name'], 'genre': genre})

    def parse_detail(self, response):
        """Parse a school's full profile page and insert a t_middle_school row.

        Fields (motto, schoolmaster, url, founding date, summary) are pulled
        from labelled table cells; the summary keeps raw HTML nodes. The row is
        inserted only when no record exists for (province, name).
        """
        meta = response.meta

        now_date = self._now()

        motto = ''.join(response.xpath(u"//strong[contains(text(), '校训')]/parent::div/parent::td/following-sibling::td/div/text()").extract())
        schoolmaster = ''.join(response.xpath(u"//strong[contains(text(), '校长')]/parent::div/parent::td/following-sibling::td/div/text()").extract())
        url = ''.join(response.xpath(u"//strong[contains(text(), '网址')]/parent::div/parent::td/following-sibling::td/div/text()").extract())
        found_at = ''.join(response.xpath(u"//strong[contains(text(), '建校时间')]/parent::div/parent::td/following-sibling::td/div/text()").extract())
        summary = ''.join(response.xpath(u"//div[@class='sub_left']/p/node()").extract())

        middle = getMiddleSchool(meta['province'], meta['name'])
        if middle is None:
            # Escape every scraped value: free-text fields (motto, summary, …)
            # routinely contain quotes that would break the statement.
            insert_sql = "insert into t_middle_school (name, province, found_at, motto, schoolmaster, genre, url, summary, created_at, updated_at) values ('%s', '%s', '%s', '%s','%s', '%s', '%s', '%s', '%s', '%s')" \
                            % (self._sql_quote(meta['name']), meta['province'], self._sql_quote(found_at), self._sql_quote(motto), self._sql_quote(schoolmaster), meta['genre'], self._sql_quote(url), self._sql_quote(summary), now_date, now_date)

            executeSql(insert_sql)