# -*- coding:utf8 -*-
import importlib
import re, sys, json, datetime, random, time
from scrapy.selector import Selector
from scrapy.exceptions import CloseSpider
try:
    from scrapy.spiders import Spider
except:
    from scrapy.spider import BaseSpider as Spider

from scrapy.http import Request, FormRequest
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc

from gaokaopai.items import *
from gaokaopai.dao import *
from gaokaopai.util import *
import jieba
import datetime

# Python 2 leftover: `reload(sys)` was only needed to re-expose the removed
# sys.setdefaultencoding() hack. On Python 3 importlib.reload(sys) is
# effectively a no-op and this stanza can be deleted.
importlib.reload(sys)
# sys.setdefaultencoding("utf-8")  # Python 2 only; does not exist on Python 3

class SdaxueUniversitySpider(Spider):
    """Crawl sdaxue.com for universities already present in the local DB and
    persist their faculties/departments (and, when enabled, their summary and
    history timeline) via the project's DAO helpers (getUniversity,
    getUniversityCollege, executeSql, ...).
    """
    name        = 'sdaxue_university'
    allow       = ['sdaxue.com']

    def start_requests(self):
        # Entry point: the filter page links to per-province university lists.
        yield Request("http://www.sdaxue.com/college/filter.html", callback=self.parse_list, dont_filter=True)

    def parse_list(self, response):
        """Follow every per-province link under the '按省份' (by province) heading."""
        base_url = get_base_url(response)

        for a_dom in response.xpath(u"//h5[contains(text(), '按省份')]/parent::ul/li/a"):
            url = ''.join(a_dom.xpath(u"./@href").extract()).strip()
            yield Request(urljoin_rfc(base_url, url), callback=self.parse_province_list, dont_filter=True)

    def parse_province_list(self, response):
        """Walk one province's (paginated) university list; for each university
        found in the local DB, request its majors page for parsing."""
        base_url = get_base_url(response)

        print(response.url)

        for a_dom in response.xpath(u"//div/h6/a"):
            name = ''.join(a_dom.xpath(u"./text()").extract()).strip()
            url = ''.join(a_dom.xpath(u"./@href").extract()).strip()

            # Normalize ASCII parentheses to full-width so the scraped name
            # matches the DB records (which use full-width punctuation).
            name = name.replace('(', '（').replace(')', '）')

            university = getUniversity(name)
            if university is not None:
                university_url = urljoin_rfc(base_url, url)

                # Faculty / major information
                yield Request(university_url, callback=self.parse_university_zhuanye, dont_filter=True, meta={'id':university['id'], 'fid':university['fid']})

                # University summary page (currently disabled)
                # yield Request(university_url.replace('college', 'summary'), callback=self.parse_university_summary, dont_filter=True, meta={'id':university['id'], 'fid':university['fid']})

        # BUGFIX: ''.join(...) never returns None, so the original `!= None`
        # test was always true; an empty href must suppress pagination instead.
        next_url = ''.join(response.xpath(u"//a[contains(@class, 'next_page')]/@href").extract())
        if next_url and response.url != urljoin_rfc(base_url, next_url):
            yield Request(urljoin_rfc(base_url, next_url), callback=self.parse_province_list, dont_filter=True)

    def parse_university_zhuanye(self, response):
        """Parse the faculty (院系) blocks of a university's majors page and
        upsert each faculty plus its departments into t_university_college."""
        meta = response.meta

        now_date = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')

        # NOTE: a long-disabled block that scraped the 国家品牌 / 国家重点 /
        # 省部重点 specialty lists used to live here as a bare (no-op) string
        # literal; it was dead code and has been removed — recover it from
        # version control if it is ever needed again.

        # Faculties
        for college_dom in response.xpath(u"//ul[contains(@class, 'faculty_major')]/li"):
            college_name = ''.join(college_dom.xpath(u"./strong/text()").extract()).strip()

            university_college = getUniversityCollege(meta['fid'], college_name)
            if university_college is None:
                pid = insertUniversityCollegeLevelOne(meta['fid'], college_name)
            else:
                pid = university_college['id']

            print("="*20)
            print(meta['fid'], pid, college_name)
            print("="*20)

            # <a> children are regular departments (isself=0); <span> children
            # are self-established ones (isself=1). Processing is otherwise
            # identical, so one parameterized loop replaces the two copies.
            for selector, isself in ((u"./p/a", 0), (u"./p/span", 1)):
                for college_p_dom in college_dom.xpath(selector):
                    xi_name = ''.join(college_p_dom.xpath(u"./text()").extract()).strip()
                    xi_type = ''.join(college_p_dom.xpath(u"./@data-type").extract())

                    xi_college = getUniversityCollege(meta['fid'], xi_name, pid, 2, xi_type, isself)

                    if xi_college is None:
                        # NOTE(security): scraped values are interpolated straight
                        # into SQL; switch executeSql to parameterized queries if
                        # the DAO layer supports them.
                        college_insert_sql = "insert into t_university_college (pid, fid, name, level, `type`, isself, created_at, updated_at) values ('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" \
                            % (pid, meta['fid'], xi_name, 2, xi_type, isself, now_date, now_date)

                        executeSql(college_insert_sql)

    def parse_university_summary(self, response):
        """Store a university's summary text and its history timeline.
        (Currently unused: the request targeting this callback is disabled
        in parse_province_list.)"""
        meta = response.meta

        # Only store a summary when the page does not show the
        # "该院校暂无简介" (no summary yet) placeholder.
        tips = ''.join(response.xpath(u"//li[@class='summary']/p[not(contains(text(), '该院校暂无简介'))]").extract())
        if tips:
            summary = ''.join(response.xpath(u"//li[contains(@class, 'summary')]/node()").extract())

            # NOTE(security): summary is raw scraped HTML interpolated into SQL.
            sql = "update t_university set summary='%s' where id='%s'" % (summary, meta['id'])
            executeSql(sql)

        # Only parse the timeline when the "院校历史" section exists.
        history = ''.join(response.xpath(u"//li[contains(text(), '院校历史')]/text()").extract())
        if history:

            now_date = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')

            for div_dom in response.xpath(u"//li[contains(@class, 'history_box')]/div"):
                year = ''.join(div_dom.xpath(u".//div[contains(@class, 'year')]/text()").extract())
                content = ''.join(div_dom.xpath(u".//div[contains(@class, 'event')]/text()").extract())
                title = ''.join(div_dom.xpath(u".//div[contains(@class, 'event')]/span/text()").extract())

                if year != '':
                    history_data = getUniversityHistory(meta['fid'], year)
                    if history_data is None:
                        print("======="*5)
                        print(meta['id'])
                        print("======="*5)

                        insert_sql = "insert into t_university_history (fid, year, title, content, created_at, updated_at) values ('%s', '%s', '%s', '%s', '%s', '%s')" \
                            % (meta['fid'], year, title, content, now_date, now_date)

                        executeSql(insert_sql)