# -*- coding: utf-8 -*-
# @Time    : 2024/1/28 20:57
# @Author  : micah
# @File    : 10.使用多进程完成腾讯招聘信息存储.py
# @Software: PyCharm


import time
import pymongo
import requests
import jsonpath
from multiprocessing import Process, JoinableQueue as Queue


def get_work_info_json(page_num, queue):
    """Fetch one page of Tencent 'python' job postings and enqueue each record.

    Args:
        page_num: 1-based page index sent as the API's ``pageIndex`` parameter.
        queue: multiprocessing.JoinableQueue the extracted dicts are put on
            for the saver process to consume.
    """
    url = "https://careers.tencent.com/tencentcareer/api/post/Query"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
    }
    params = {
        "timestamp": "1706446462397",
        "countryId": "",
        "cityId": "",
        "bgIds": "",
        "productId": "",
        "categoryId": "",
        "parentCategoryId": "",
        "attrId": "",
        "keyword": "python",
        "pageIndex": str(page_num),
        "pageSize": "10",
        "language": "zh-cn",
        "area": "cn"
    }

    # timeout so a stalled connection cannot hang this worker process forever
    response = requests.get(url, params=params, headers=headers, timeout=10).json()
    # Guard against a missing/None 'Data' or 'Posts' in the API response.
    posts = (response.get('Data') or {}).get('Posts') or []
    for info in posts:
        work_info_dict = dict()
        for field, path in (
            ('country_name', '$..CountryName'),
            ('location_name', '$..LocationName'),
            ('category_name', '$..CategoryName'),
            ('responsibility', '$..Responsibility'),
            ('last_update_time', '$..LastUpdateTime'),
        ):
            # jsonpath.jsonpath returns False (not a list) when there is no
            # match, so indexing [0] unconditionally would raise TypeError.
            matches = jsonpath.jsonpath(info, path)
            work_info_dict[field] = matches[0] if matches else None
        queue.put(work_info_dict)


def save_work_info(queue):
    """Consume job dicts from the queue and persist each into MongoDB.

    Loops forever; intended to run in a daemon process so it is terminated
    together with the main process once the queue has been drained.

    Args:
        queue: multiprocessing.JoinableQueue the producer processes feed.
    """
    client = pymongo.MongoClient()
    work_collection = client['py_spider']['process_tx_work']
    while True:
        record = queue.get()  # blocks until a producer puts an item
        work_collection.insert_one(record)
        print('保存成功:', record)
        queue.task_done()  # lets queue.join() in the main process return


if __name__ == '__main__':
    dict_queue = Queue()

    # One producer process per result page (pages 1..42).
    producer_list = [
        Process(target=get_work_info_json, args=(page, dict_queue))
        for page in range(1, 43)
    ]

    # Single consumer; daemon so it is killed when the main process exits
    # (its loop never returns on its own).
    p_save_work = Process(target=save_work_info, args=(dict_queue,))
    p_save_work.daemon = True
    p_save_work.start()

    for producer in producer_list:
        producer.start()

    # Join the producers instead of the former time.sleep(3) hack: sleeping a
    # fixed interval risked dict_queue.join() returning before slow producers
    # had enqueued anything, silently dropping pages. Joining guarantees every
    # item has been put on the queue before we wait for the saver to drain it.
    for producer in producer_list:
        producer.join()

    dict_queue.join()  # returns once the saver has task_done()'d every item
    print('进程任务完成...')







