import re
import requests
from pyquery import PyQuery as pq
from sql_helper import MySqlHelper
import json
import time
from urllib import parse
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
# Desktop Chrome user agent, sent with the plain `requests` calls so the
# detail-page fetches look like a normal browser.
# NOTE: the original string read "Windows NT 6.1.txt" — a stray ".txt" had
# crept into the platform token, producing a malformed UA that anti-bot
# filters could flag; fixed to the standard "Windows NT 6.1".
USER_AGENT = ('Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 '
              '(KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36')
headers = {
    'User-Agent': USER_AGENT
}


def read_jd_list(url, item_dict_all=None):
    """Scrape a Lagou job-listing page and collect one row per job posting.

    The listing page at *url* is rendered with Selenium/Chrome; for every job
    card found, the job's detail page is fetched with ``requests`` and parsed
    with pyquery.  Each job becomes a list of
    ``[post, company, site, city, salary, condition, experience, education,
    welfare]`` appended to *item_dict_all*.

    :param url: URL of the Lagou search-result page.
    :param item_dict_all: optional list to append rows to; a fresh list is
        created when ``None`` (avoids the mutable-default-argument pitfall).
    :return: *item_dict_all* with the scraped rows appended.
    """
    if item_dict_all is None:
        item_dict_all = list()

    web = webdriver.Chrome()
    try:
        # The page is loaded twice on purpose: the first request tends to land
        # on Lagou's anti-crawler interstitial, the second lands on the real
        # listing.  NOTE(review): empirically observed behavior — verify it is
        # still needed before removing.
        web.get(url)
        web.get(url)
        wait = WebDriverWait(web, 10)
        wait.until(EC.element_to_be_clickable(
            (By.CSS_SELECTOR,
             '#s_position_list > ul > li > div.list_item_top > div.position > div.p_top > a > h3')))
        # Use the By-based lookup API (already imported for the wait above);
        # the find_elements_by_css_selector shorthand was removed in Selenium 4.
        posts = web.find_elements(
            By.CSS_SELECTOR,
            '#s_position_list > ul > li > div.list_item_top > div.position > div.p_top > a > h3')
        companies = web.find_elements(
            By.CSS_SELECTOR,
            "#s_position_list > ul > li > div.list_item_top > div.company > div.company_name > a")
        detail_links = web.find_elements(
            By.CSS_SELECTOR,
            "#s_position_list > ul > li > div.list_item_top > div.position > div.p_top > a")

        for post, company, link in zip(posts, companies, detail_links):
            detail_url = link.get_attribute('href')
            print(detail_url)
            # Timeout so a single stalled detail page cannot hang the scrape.
            response = requests.get(detail_url, headers=headers, timeout=10)
            html = pq(response.text)
            site = html('#job_detail > dd.job-address.clearfix > div.work_addr').text()
            city = html('#job_detail > dd.job-address.clearfix > div.work_addr > a:nth-child(1)').text()
            salary = html('body > div.position-head > div > div.position-content-l > dd > p:nth-child(1) > span.salary').text()
            condition = html("#job_detail > dd.job_bt > div").text()
            experience = html('body > div.position-head > div > div.position-content-l > dd > p:nth-child(1) > span:nth-child(3)').text()
            education = html('body > div.position-head > div > div.position-content-l > dd > p:nth-child(1) > span:nth-child(4)').text()
            welfare = html("#job_detail > dd.job-advantage > p").text()
            print(site)
            item_dict_all.append([
                post.text,                   # job title
                company.text,                # company name
                site,                        # address
                city,                        # city
                salary,                      # salary
                condition,                   # job requirements
                experience.split('/')[0],    # experience ("x 年/..." -> keep the years part)
                education.split('/')[0],     # education ("本科/..." -> keep the degree part)
                welfare,                     # company benefits
            ])
    finally:
        # Always release the browser, even when scraping fails part-way;
        # previously the driver was leaked (web.close() was commented out).
        web.quit()

    return item_dict_all


if __name__ == "__main__":
    # , '广州', '杭州', '深圳', '武汉', '重庆', '成都', '郑州', '西安', '济南', '长沙'
    # for i in ['成都', '郑州']:
    #     url_data = parse.urlencode(dict(city=i))
    #     print(url_data)
        url = "https://www.lagou.com/jobs/list_python?px=default&city=长沙"
        print(url)
        item_dict_all = read_jd_list(url)
        mysql = MySqlHelper()
        # 岗位, 公司名称, 地址, 城市, 工资, 任职条件, 经验, 学历, 待遇
        sql = "INSERT INTO lagou(`post`, `company`, `site`, `city`, `salary`, `condition`, `experience`, `education`, `welfare`) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        print(item_dict_all)
        mysql.exec_many(sql, item_dict_all)
        # a = 0
        # for s in range(0, 60):
        #     a += 1
        #     time.sleep(1)
        #     print(a)
