# -*- coding:utf8 -*-

import importlib, re, sys, json, datetime, random, time
from scrapy.selector import Selector
from scrapy.exceptions import CloseSpider
import urllib
import lxml.html
import json
try:
    from scrapy.spiders import Spider
except:
    from scrapy.spider import BaseSpider as Spider

from scrapy.http import Request, FormRequest
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from gaokaopai.items import *
from gaokaopai.dao import *

# NOTE(review): leftover Python-2 idiom (`reload(sys)` followed by
# `sys.setdefaultencoding(...)`). On Python 3 this reload accomplishes
# nothing useful — candidate for removal; confirm nothing relies on it.
importlib.reload(sys)


class ScoreLineSpider(Spider):
    """Scrape per-province admission score lines from wmzy.com.

    Flow: log in with each account via Selenium to collect cookie jars,
    then request the score-list API for every (university, province) pair,
    picking a random cookie jar per request to spread load across accounts.
    """

    name = 'score_line'
    # Scrapy's off-site filter reads `allowed_domains`; the original `allow`
    # attribute is kept for backward compatibility but is not used by Scrapy.
    allow = ['wmzy.com']
    allowed_domains = ['wmzy.com']

    def __init__(self, kw='', *args, **kwargs):
        # `kw` is accepted so `-a kw=...` on the CLI does not error, but it
        # is currently unused by this spider.
        super(ScoreLineSpider, self).__init__(*args, **kwargs)

    def start_requests(self):
        """Log in, then yield one API request per university/province pair."""
        accounts = [
            {'mobile': '18555113595', 'password': '123456'},
            {'mobile': '13635656921', 'password': '123456'},
        ]

        # One cookie jar per account, gathered up front.
        cookies = [self.login(account) for account in accounts]
        print('cookies==========================', cookies)

        universitys = selectScrapyUniversity()
        provinces = selectScrapyProvince()

        for university in universitys:
            for province in provinces:
                # `_` is a cache-buster; use the current time in ms instead
                # of the stale hard-coded constant the original carried.
                url = ('https://www.wmzy.com/api/school/score-list'
                       '?sch_id=%s&diploma=%s&province=%s&ty=wen&page=1&_=%d'
                       % (university['sch_id'], university['diploma'],
                          province['number'], int(time.time() * 1000)))
                print('url==========================', url)

                # The original meta dict repeated "province_name" three
                # times; duplicate keys silently collapse, so keep one.
                meta = {
                    'university_name': university['name'],
                    'university_id': university['sch_id'],
                    'university_diploma': university['diploma'],
                    'province_number': province['number'],
                    'province_name': province['name'],
                    'type': 'wen',
                }
                yield Request(url, meta=meta, callback=self.parse_list,
                              cookies=random.choice(cookies))

    def parse_list(self, response):
        """Parse a score-list page.

        On HTTP 403 (blocked / not logged in) the request parameters are
        re-emitted as a ScrapyScoreItem so the pair can be retried later;
        otherwise the 2017 row of the score table is yielded as a
        ScrapyScoreLineItem.
        """
        if response.status == 403:
            ss = ScrapyScoreItem()
            ss['table'] = 'scrapy_score'
            ss['name'] = response.meta['university_name']
            ss['sch_id'] = response.meta['university_id']
            ss['diploma'] = response.meta['university_diploma']
            ss['province_name'] = response.meta['province_name']
            ss['number'] = response.meta['province_number']
            ss['type'] = response.meta['type']
            yield ss
            return

        # //tr[2] selects only the second table row on the page — presumably
        # the most recent (2017) data row; TODO confirm against the markup.
        for row in response.xpath(u"//tr[2]"):
            print('response==========================', row)

            def cell(index):
                # Joined, stripped text of the index-th <td> in this row.
                return ''.join(
                    row.xpath(u"./td[%d]/text()" % index).extract()).strip()

            year = cell(1)
            average_score = cell(2)           # average score
            minimum_score = cell(3)           # minimum score
            provincial_line = cell(4)         # provincial control line
            minimum_score_ranking = cell(5)   # rank of the minimum score
            people_number = cell(6)           # number admitted
            batch = cell(7)                   # admission batch
            print('year==========================', year)
            print('average_score==========================', average_score)

            if year == '2017':
                su = ScrapyScoreLineItem()
                su['table'] = 'scrapy_score_line'
                su['university_name'] = response.meta['university_name']
                su['province_name'] = response.meta['province_name']
                su['year'] = year
                su['average_score'] = average_score
                su['minimum_score'] = minimum_score
                su['provincial_line'] = provincial_line
                su['minimum_score_ranking'] = minimum_score_ranking
                su['people_number'] = people_number
                su['batch'] = batch
                su['type'] = 'wen'
                yield su

    def login(self, account):
        """Log in through a Selenium-driven Chrome; return its cookie list.

        The driver is always released via try/finally (the original leaked
        the browser process if any step raised).
        """
        url = 'https://www.wmzy.com/api/school-00-0-0-0'
        driver = webdriver.Chrome()
        try:
            driver.get(url)
            driver.find_element_by_id("login_link").click()
            driver.find_element_by_id("switchForm").click()

            driver.find_element_by_id("mobile").send_keys(account['mobile'])
            driver.find_element_by_id("password").send_keys(account['password'])
            try:
                submit_btn = driver.find_element_by_class_name('btn-submitFrom')
                print('submit:', submit_btn)
                submit_btn.click()
            except Exception as exc:
                # Best-effort submit: the button class may change; log why.
                print('error', exc)

            # Crude wait for the post-login redirect to settle.
            # TODO: replace with WebDriverWait on a post-login element.
            time.sleep(10)

            cookies = driver.get_cookies()
            print('cookies', cookies)
            return cookies
        finally:
            # quit() closes every window and ends the session; the original's
            # extra close() before quit() was redundant.
            driver.quit()

