# -*- coding: utf-8 -*-
# @Time    : 2024/1/30 20:04
# @Author  : micah
# @File    : 12.使用线程池完成百度招聘数据抓取.py
# @Software: PyCharm


import pymysql
import requests
from concurrent.futures import ThreadPoolExecutor


class BaiDuWork:
    """Scrape Baidu social-recruitment postings matching "python" into MySQL.

    Pages are downloaded concurrently by a thread pool; parsing and all
    database writes happen on the main thread only, because a single
    pymysql connection/cursor is not safe to share across threads.
    """

    def __init__(self):
        # charset=utf8mb4 so Chinese text in postings round-trips safely.
        self.db = pymysql.connect(host='localhost', user='root', password='123456',
                                  db='py_spider', charset='utf8mb4')
        self.cursor = self.db.cursor()

        self.api_url = 'https://talent.baidu.com/httservice/getPostListNew'
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Cookie': 'BAIDUID_BFESS=2CC86375B26DE6C5F9E6D5F228682F01:FG=1; BIDUPSID=2CC86375B26DE6C5F9E6D5F228682F01; PSTM=1704281079; ZFY=4WZsRNNBqIPj2GNv2rrXg:BCIOAsJZtTSXEN5nBX4eJo:C; H_WISE_SIDS=282630_283599_281704_284830_285064_282466_286996_110085_287238_283016_284880_287556_287627_287653_287662_287713_283904_287168_287932_280167_283782_288669_288710_288714_288717_288742_288747_288749_284551_287620_288153_284816_269049_265881_281894_289545_289552_287718_289948_289955_282932_290234_290344_290361_286540_290498_290354_286491_290555_290560_290562_290381_282553_282815_286861_289236_289431_287975_291051_203517_291154_287703_287175_290187_291244_277936_290425_288785_256739_290667_288252_291481_281879_279427; H_WISE_SIDS_BFESS=282630_283599_281704_284830_285064_282466_286996_110085_287238_283016_284880_287556_287627_287653_287662_287713_283904_287168_287932_280167_283782_288669_288710_288714_288717_288742_288747_288749_284551_287620_288153_284816_269049_265881_281894_289545_289552_287718_289948_289955_282932_290234_290344_290361_286540_290498_290354_286491_290555_290560_290562_290381_282553_282815_286861_289236_289431_287975_291051_203517_291154_287703_287175_290187_291244_277936_290425_288785_256739_290667_288252_291481_281879_279427; H_PS_PSSID=39999_40024_40044; Hm_lvt_50e85ccdd6c1e538eb1290bc92327926=1706616071; RT="z=1&dm=baidu.com&si=142183cf-fe8b-43a5-8ed0-e47e13b3a0e0&ss=ls0b57p0&sl=1&tt=17l&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf"; Hm_lpvt_50e85ccdd6c1e538eb1290bc92327926=1706616109',
            'Referer': 'https://talent.baidu.com/jobs/social-list?search=python'
        }

    def __del__(self):
        print('数据库链接即将关闭...')
        # Best-effort cleanup: attributes may be missing if __init__ failed,
        # and connections may already be gone at interpreter shutdown.
        try:
            self.cursor.close()
            self.db.close()
        except Exception:
            pass

    def get_work_info(self, page):
        """Fetch one page of postings from the recruitment API.

        :param page: 1-based page number to request.
        :return: decoded JSON response as a dict.
        :raises requests.RequestException: on network failure or timeout.
        """
        form_data = {
            'recruitType': 'SOCIAL',
            'pageSize': 10,
            'keyWord': 'python',
            'curPage': page,
            'projectType': ''
        }

        # Explicit timeout so a hung request cannot stall a worker thread forever.
        response = requests.post(self.api_url, headers=self.headers,
                                 data=form_data, timeout=10)
        return response.json()

    def parse_work_info(self, response):
        """Extract each posting's fields from one API page and persist it.

        :param response: dict returned by get_work_info().
        """
        works = response['data']['list']
        for work_info in works:
            # API returns null for education on some postings; store a placeholder.
            education = work_info['education'] if work_info['education'] else '空'
            name = work_info['name']
            service_condition = work_info['serviceCondition']
            # None lets MySQL assign the auto-increment id. Inserting a literal 0
            # (as before) only works by accident and breaks under the
            # NO_AUTO_VALUE_ON_ZERO sql_mode.
            self.save_work_info(None, education, name, service_condition)

    def create_table(self):
        """Create the target table if it does not exist yet."""
        create_table_sql = """
            create table if not exists baiduWork_threadPool(
                id int primary key auto_increment,
                education varchar(200),
                name varchar(100),
                service_condition text
            );
        """
        try:
            self.cursor.execute(create_table_sql)
            print('表创建成功...')
        except Exception as e:
            print('表创建失败:', e)

    def save_work_info(self, *args):
        """Insert one row (id, education, name, service_condition).

        Commits on success, rolls back on failure. Parameterized SQL keeps the
        scraped strings from being interpreted as SQL.
        """
        sql = """
            insert into baiduWork_threadPool(id, education, name, service_condition) values (
                %s, %s, %s, %s
            );
        """

        try:
            self.cursor.execute(sql, args)
            self.db.commit()
            print('数据保存成功:', *args)
        except Exception as e:
            print('数据保存失败:', e)
            self.db.rollback()

    def main(self):
        """Create the table, download pages 1-31 concurrently, and store rows."""
        self.create_table()
        with ThreadPoolExecutor(max_workers=5) as pool:
            # Submit every page FIRST so the downloads overlap. The original
            # code called .result() right after each submit(), which blocked
            # on every future in turn and serialized all requests.
            futures = [pool.submit(self.get_work_info, page) for page in range(1, 32)]
            # Consume results on the main thread so the shared pymysql
            # connection is never touched from multiple threads.
            for future in futures:
                self.parse_work_info(future.result())


if __name__ == '__main__':
    # Script entry point: build the scraper and run the full pipeline.
    spider = BaiDuWork()
    spider.main()
