# -*- coding:utf-8 -*-
# Author: jingfeng
# time: 2018/11/17


import multiprocessing
import queue
import requests
from lxml import etree
from fake_useragent import UserAgent
import re

# Pick one random User-Agent at import time; every request in this
# module reuses this single header dict.
ua = UserAgent()
headers ={'User-Agent': ua.random}


def get_page_number():
    """Fetch the first 51job search-result page and return the page count.

    Returns:
        str: the total number of result pages as a digit string
        (the caller converts it to ``int``).

    Raises:
        requests.HTTPError: if the server returns an error status.
        AttributeError: if the page layout changed and no digits are found.
    """
    url = 'https://search.51job.com/list/000000,000000,0000,00,9,99,python,2,1.html'
    # timeout + raise_for_status: fail fast instead of hanging forever or
    # silently parsing an HTML error page.
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    # 51job serves GBK-encoded pages; requests' charset guess is unreliable.
    response.encoding = 'gbk'
    html = response.text
    # The result summary (which contains the page count) is the first
    # <span class="td"> on the page.
    number = re.findall('<span class="td">(.*?)</span>', html)[0]
    page = re.search(r'(\d+)', number).group()

    return page



def get_info(pagelist):
    """Scrape 51job search-result pages and yield one record per job listing.

    Args:
        pagelist: iterable of 1-based page numbers to fetch.

    Yields:
        list: ``[job title, company name, location, salary, publish date]``.
    """
    for page_no in pagelist:
        url = 'https://search.51job.com/list/000000,000000,0000,00,9,99,python,2,{}.html'.format(page_no)
        print('爬取第{}页'.format(page_no))

        # timeout so a stalled connection cannot hang the worker process
        response = requests.get(url, headers=headers, timeout=10)
        response.encoding = 'gbk'  # 51job pages are GBK-encoded
        html = response.text

        selector = etree.HTML(html)

        # Columns: job title / company / location / salary / publish date.
        jobs = selector.xpath('//p[contains(@class,"t1")]/span/a[@target="_blank"]/text()')
        company_names = selector.xpath('//span[@class="t2"]/a[@target="_blank"]/text()')
        # [1:] skips the table-header cell that shares the same class.
        adresses = selector.xpath('//span[@class="t3"]/text()')[1:]
        # NOTE(review): salary is pulled with a regex instead of xpath —
        # presumably because empty <span class="t4"> cells would be dropped
        # by /text() and misalign the columns; confirm before changing.
        moneys = re.findall(r'<span class="t4">(.*?)</span>', html)[1:]
        times = selector.xpath('//span[@class="t5"]/text()')[1:]

        # zip truncates to the shortest column, which guards against a
        # partially-parsed page producing mismatched rows.
        for job, company_name, adress, money, time in zip(
                jobs, company_names, adresses, moneys, times):
            yield [job.strip(), company_name, adress, money, time]



def get_info_list(pagelist, queue):
    """Scrape every page in ``pagelist`` and push each record onto ``queue``.

    NOTE(review): the ``queue`` parameter shadows the stdlib ``queue``
    module imported at the top of the file; the name is kept because it is
    part of this function's public signature.
    """
    for record in get_info(pagelist):
        queue.put(record)


# NOTE(review): these statements run at import time — including a live
# network request inside get_page_number(); consider moving them under
# the __main__ guard.
page = get_page_number()
queue = multiprocessing.Manager().Queue()  # shadows the stdlib ``queue`` import
# int() instead of eval(): ``page`` is text scraped from a remote site,
# and eval() on untrusted input is a code-execution vulnerability.
mylist = list(range(1, int(page) + 1))

# One bucket of page numbers per worker process.
djlist = [[], [], [], [], [], [], [], [], [], []]
N = len(djlist)





if __name__ == "__main__":



    for i in range(len(mylist)):
        djlist[i % N].append(mylist[i])  # 取余寻找列表，均匀分配数据

        processlist = []

    for i in range(10):
        process = multiprocessing.Process(target=get_info_list, args=(djlist[i], queue))
        process.start()
        processlist.append(process)

    for dj in processlist:
        dj.join()

    file = open('python_51job.txt', 'w', encoding='utf-8')

    while not queue.empty():

        try:
            data = queue.get(timeout=2)

            print(data)
            file.write(str(data) + '\n')
            file.flush()


        except Exception as e:
            print(e)
    print('1111111111')









