# coding:utf-8
# Import webdriver and supporting libraries
import configparser
import os
import random
import socket
import sys
import traceback
from threading import Thread

import pymysql
import time

import requests
import selenium
import urllib3
from selenium import webdriver
from selenium.webdriver import ChromeOptions

from Utils.ExtractInfo import extract_info
from Utils.Log import mylog
from Utils.PrepareInfo import prepare_info
from Utils.Time import mytime


class LagouJob(object):
    """Scrape job postings from lagou.com with Selenium and store them in MySQL.

    Workflow (see run()): detail-page URLs are read from doc/urls.txt (or
    harvested from the listing pages by the commented-out phase in run()),
    then each detail page is visited, the posting's fields are extracted and
    one row per posting is inserted into the MySQL table named after the
    search keyword. A background thread refreshes the proxy pool.
    """

    def __init__(self):
        # NOTE(review): the original author flagged that maximizing the
        # window may raise on some platforms -- left as-is, confirm.
        options = ChromeOptions()
        options.add_argument("--start-maximized")
        # Browser instance. webdriver attributes cannot be changed after
        # construction, so set_init_params() rebuilds the driver instead.
        self.driver = webdriver.Chrome(chrome_options=options)
        # Listing-page URL template; run() fills in keyword/location, then
        # self.url is reused to hold the current detail-page URL.
        self.url = 'https://www.lagou.com/jobs/list_{keyword}?px=new&city={location}'
        # Current listing-page number (1-based); None once the last page is done.
        self.listpage_page_next = ''
        # Detail-page URLs still to be scraped.
        self.listpage_urls = []
        # Fields of the posting currently being parsed.
        self.detail_info = {}
        # All postings parsed so far.
        self.all_info = []
        # Original working directory.
        self.origin_path = os.getcwd()
        # Path of the Utils package (two directory levels up from cwd).
        self.new_path = '/'.join(self.origin_path.split('\\')[:-2]) + '/Utils'
        # Configure logging.
        mylog.basicConfig('debug')
        # MySQL credentials live in an ini file pointed at by the
        # 'user_pswd' environment variable (KeyError if unset).
        user_pswd_cfg = os.environ['user_pswd']
        cf = configparser.ConfigParser()
        cf.read(user_pswd_cfg, encoding='utf-8')
        mysql_user = cf.get('MySQL', 'MySQL_user')
        mysql_pswd = cf.get('MySQL', 'MySQL_pswd')
        self.config = {
            'host': 'localhost',
            'user': mysql_user,
            'password': mysql_pswd,
            'database': 'lagou',
            'port': 3306,
            'charset': 'utf8',
        }
        # Column names of the target table, filled lazily by save_to_mysql().
        self.table_fields = []
        # JS snippet that scrolls to the bottom so the pager renders.
        self.js = 'window.scrollTo(0,document.body.scrollHeight);'

    def parse_listpage(self):
        """Collect detail URLs from the current listing page and advance the pager.

        Sets listpage_page_next to None when the last page has been scraped.
        """
        if self.listpage_page_next == 1:
            self.driver.get(self.url)
        _el_list = self.driver.find_elements_by_xpath('//a[@class="position_link"]')
        self.listpage_urls.extend([_el.get_attribute('href') for _el in _el_list])
        # Random pause to look less bot-like, then scroll so the pager loads.
        time.sleep(random.randint(1, 3))
        self.driver.execute_script(self.js)
        buttons = self.driver.find_elements_by_xpath('//*[@id="s_position_list"]/div[3]/div/span')
        next_button_class = buttons[-1].get_attribute('class').strip()
        if next_button_class == 'pager_next':
            buttons[-1].click()
            time.sleep(random.randint(1, 3))
            self.listpage_page_next += 1
        else:
            self.listpage_page_next = None

    def save_urls(self):
        """Append every collected detail URL to doc/urls.txt, one per line."""
        with open('doc/urls.txt', mode='a+') as f:
            for url in self.listpage_urls:
                f.write(url + '\n')

    def read_urls(self):
        """Load detail URLs from doc/urls.txt into listpage_urls.

        BUGFIX: strip the trailing newline that readlines() keeps (and skip
        blank lines); previously driver.get() was fed URLs ending in '\\n'.
        """
        with open('doc/urls.txt') as f:
            self.listpage_urls = [line.strip() for line in f if line.strip()]

    def update_proxies(self):
        """Refresh the proxy pool every 3 minutes, forever (runs in a thread)."""
        os.chdir(self.new_path)
        while True:
            time.sleep(3 * 60)
            prepare_info.save_proxies()

    def parse_time(self, _time):
        """Normalize a publish-time string to an absolute date/datetime.

        'N天前' style values become the date N days ago; 'HH:MM' style values
        become today's date plus that time; anything else passes through.
        """
        if '前' in _time:
            # BUGFIX: the original used _time[0] -- only the FIRST digit --
            # which is wrong for counts of 10 days or more.
            days = ''.join(ch for ch in _time if ch.isdigit())
            _time = mytime.get_ymd_before(day=-int(days))
        elif ':' in _time:
            _time = '{date} {time}'.format(date=mytime.get_ymd(), time=_time)
        return _time

    def set_init_params(self):
        """Rebuild the webdriver with a fresh random User-Agent and proxy."""
        if self.driver:
            # BUGFIX: quit() (not close()) terminates the chromedriver
            # process; close() only closed the window and leaked one
            # driver process per rotation.
            self.driver.quit()
        options = ChromeOptions()
        options.add_argument("--start-maximized")
        os.chdir(self.new_path)
        user_agent = extract_info.get_useragent()
        # Rotate the User-Agent.
        options.add_argument('--user-agent=' + user_agent['User-Agent'])
        extract_info.ua_default = user_agent
        proxy = extract_info.get_proxy()
        for key in proxy:
            # Rotate the IP proxy (key is the proxy type, value the address).
            options.add_argument('--proxy=' + proxy[key])
            options.add_argument('--proxy-type=' + key)
        self.driver = webdriver.Chrome(chrome_options=options)

    def parse_detail(self):
        """Fetch self.url and extract the posting's fields into detail_info.

        On success the URL is removed from listpage_urls; on a known network/
        scraping error the method logs, sleeps 30s and leaves the URL queued
        for retry.
        """
        # BUGFIX: reset first so a failed parse leaves detail_info empty
        # instead of still holding the PREVIOUS posting (which run() would
        # then re-insert as a duplicate row).
        self.detail_info = {}
        try:
            self.set_init_params()
            self.driver.get(self.url)
            time.sleep(random.randint(1, 3))
            _temp = {}
            _temp['link'] = self.url
            _temp['name'] = self.driver.find_element_by_xpath('//div[@class="company"]').text
            _temp['job'] = self.driver.find_element_by_xpath('//div[@class="job-name"]').get_attribute('title')
            _temp['salary'] = self.driver.find_element_by_xpath('//span[@class="salary"]').text
            # job_request spans: [2]=experience, [3]=education (both with a
            # trailing separator char that is sliced off), [4]=job type.
            _temp['exp'] = self.driver.find_elements_by_xpath('//dd[@class="job_request"]//span')[2].text[:-1]
            _temp['edu'] = self.driver.find_elements_by_xpath('//dd[@class="job_request"]//span')[3].text[:-1]
            _temp['type'] = self.driver.find_elements_by_xpath('//dd[@class="job_request"]//span')[4].text
            _time = self.driver.find_element_by_xpath('//p[@class="publish_time"]').text.split(' ')[0]
            _temp['time'] = self.parse_time(_time)
            _temp['advantage'] = ''.join(
                [_i.text for _i in self.driver.find_elements_by_xpath('//dd[@class="job-advantage"]/p')])
            _temp['duty'] = ''.join(
                [_i.text for _i in self.driver.find_elements_by_xpath('//dd[@class="job_bt"]/div/p')])
            _temp['location'] = ''.join(
                [_i.text for _i in self.driver.find_elements_by_xpath('//div[@class="work_addr"]')])
        except (selenium.common.exceptions.NoSuchElementException, selenium.common.exceptions.WebDriverException,
                socket.gaierror, urllib3.exceptions.NewConnectionError, urllib3.exceptions.MaxRetryError,
                requests.exceptions.ConnectionError):
            mylog.setLog('error', traceback.format_exc())
            time.sleep(30)
            return
        self.detail_info = _temp
        self.all_info.append(_temp)
        self.listpage_urls.remove(self.url)

    def save_to_mysql(self, keyword):
        """Insert the current detail_info as one row into the `keyword` table.

        The table's column list is fetched once and cached in table_fields;
        fields missing from detail_info are stored as ''. On any failure the
        transaction is rolled back, the error logged, and the process exits.
        """
        conn = None
        csr = None
        try:
            conn = pymysql.connect(**self.config)
            csr = conn.cursor()
            if self.table_fields == []:
                # Look up the target table's column names once.
                select_sql = 'select COLUMN_NAME from information_schema.columns where table_name="{keyword}" and TABLE_SCHEMA="{database}"'.format(
                    keyword=keyword, database=self.config['database'])
                sel_result = csr.execute(select_sql)
                mylog.setLog('debug', 'select {sel_result} field(s)'.format(sel_result=sel_result))
                for field in csr.fetchall():
                    self.table_fields.append(field[0])
            # BUGFIX: insert into the table named by `keyword`, matching the
            # column lookup above -- the original hardcoded table "python".
            # Table names cannot be bound as parameters; `keyword` comes from
            # run()'s argument, not from untrusted input. The row values ARE
            # parameterized.
            insert_sql = 'INSERT INTO {keyword} values({placeholder}%s)'.format(
                keyword=keyword,
                placeholder='%s, ' * len(self.table_fields[:-1]))
            params = [self.detail_info.get(field, '') for field in self.table_fields]
            ins_result = csr.execute(insert_sql, params)
            mylog.setLog('debug', 'insert {ins_result} record(s)'.format(ins_result=ins_result))
            conn.commit()
        except Exception:
            # BUGFIX: was a bare except, which also caught SystemExit and
            # KeyboardInterrupt.
            if conn:
                conn.rollback()
            mylog.setLog('error', traceback.format_exc())
            sys.exit(1)
        finally:
            if csr:
                csr.close()
            if conn:
                conn.close()

    def run(self, location='上海', keyword='python'):
        """Entry point: scrape every URL in doc/urls.txt into MySQL.

        The string statement below is the original URL-harvesting phase that
        produced doc/urls.txt; it is kept disabled for reference. `location`
        is only used by that phase.
        """
        """
        # 获取链接
        self.url = self.url.format(keyword=keyword.capitalize(), location=location)
        self.listpage_page_next = 1
        while self.listpage_page_next:
            mylog.setLog('debug', 'listpage_page:{listpage_page}'.format(listpage_page=self.listpage_page_next))
            # 获取响应，提取详情链接 翻页
            self.parse_listpage()
        # 链接存储
        self.save_urls()
        """

        # Refresh the IP proxy pool in the background.
        # BUGFIX: daemon thread -- the non-daemon worker's infinite loop
        # previously kept the process alive after sys.exit(0) below.
        thread = Thread(target=self.update_proxies, daemon=True)
        thread.start()
        # Parse each queued detail URL.
        self.read_urls()
        while self.listpage_urls != []:
            self.url = self.listpage_urls[0]

            mylog.setLog('debug', 'detail_url:{detail_url}'.format(detail_url=self.url))

            # Fetch the page and record the posting's details.
            self.parse_detail()
            # BUGFIX: only save when the parse succeeded; previously a failed
            # parse re-inserted the previous posting's detail_info.
            if self.detail_info:
                self.save_to_mysql(keyword)

        mylog.setLog('debug', 'task completed!')

        # BUGFIX: quit() shuts down the browser AND the driver process;
        # close() only closes the window.
        self.driver.quit()
        sys.exit(0)


def main():
    """Build a LagouJob scraper and run it with the default parameters."""
    job = LagouJob()
    job.run()


if __name__ == '__main__':
    main()
