import json
from time import sleep

import requests

import pandas as pd
import requests
from lxml import etree

# Request headers captured from a logged-in browser session.
# NOTE(review): the Cookie value is session-bound and will expire; refresh it
# from the browser dev tools when requests start failing.
# The hard-coded 'Content-Length' header was removed: requests computes the
# correct length from the actual form body, and a stale fixed value ('62')
# would be wrong for any other payload.
headers = {
    'Accept': 'application/json, text/plain, */*',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
    # The cookie is split across two adjacent string literals (implicitly
    # concatenated) purely for line length; the value is unchanged.
    'Cookie': 'BIDUPSID=08ABC0196876DC060404203BC8FE5FFE; PSTM=1698922415; BAIDUID=08ABC0196876DC061FFF0832AF282875:FG=1; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; BAIDUID_BFESS=08ABC0196876DC061FFF0832AF282875:FG=1; ZFY=MeLuQ78nQEiA2:AOUPpP:AHUain75nzMFzEMkf8csULMA:C; H_PS_PSSID=39636_39669_39664_39676_39679_39713_39738_39753_39765_39789_39785_39703_39686; delPer=0; PSINO=6; BA_HECTOR=a58l050l2584a0al05218k241ilmh4r1q; Hm_lvt_50e85ccdd6c1e538eb1290bc92327926=1700480226; BCLID=10062588109201161705; BCLID_BFESS=10062588109201161705; BDSFRCVID=XVkOJexroG3O1HvqSnYG8PwVF_weG7bTDYrEOwXPsp3LGJLVFdWiEG0Pts1-dEu-S2OOogKK3gOTH40F_2uxOjjg8UtVJeC6EG0Ptf8g0M5; BDSFRCVID_BFESS=XVkOJexroG3O1HvqSnYG8PwVF_weG7bTDYrEOwXPsp3LGJLVFdWiEG0Pts1-dEu-S2OOogKK3gOTH40F_2uxOjjg8UtVJeC6EG0Ptf8g0M5; H_BDCLCKID_SF=tJIJ_ID2JCD3H48k-4QEbbQH-UnLqhT9WmOZ04n-ah05qnRKhnQ60hFbeb3fLf5-W20jbUjm3UTKsq76Wh35K5tTQP6rLf5U-I34KKJxbpbG8J5IytKaQMu3hUJiB5JMBan7_IJIXKohJh7FM4tW3J0ZyxomtfQxtNRJ0DnjtpChbC_Gj58aj5QLeU5eetjK2CntsJOOaCvVDCnOy4oWK441DpbQLtRDLa4D2DnxbMQPeqvoDh3G3M04K4o9-hvT-54e2p3FBUQPSPJcQft20b0v0tc-KfcatmnuLn7jWhk5ep72y582QlRX5q79atTMfNTJ-qcH0KQpsIJM5-DWbT8IjHCDJ5kDtJuHVbQqatTEKRopMtOhq4tehHRn0x79WDTOQJ7TthAaO-Ql0p0aX6kr5njlybb-3bva-pbw-q5GqlcD3MQNqfIuBnjlKpb03mkjbPbbt66fstKz0T5cXP4syP4eKMRnWnPjKfA-b4ncjRcTehoM3xI8LNj405OTbIFO0KJzJCFahIPlD6KKePDyqx5Ka43tHD7yWCvpWj6cOR59K4nnDURLyhjDQhO3bRbTLMQIX-3qsI3P3MOZKxLg5n7Tbb8eBgvZ2UQI5U3Esq0x0bO5DDuOQq_LQ5j9BKOMahkb5h7xOK-xQlPK5JkgMx6MqpQJQeQ-5KQN3KJmfbL9bT3YjjTLDNADJjDHfKresJoq2RbhKROvhjRi34PgyxoObtRxt28DaPQhLbrZsInM5pjPBn-8QNJaLU3k-eT9LMnx--t58h3_XhjPbh-tQttjQn3et4jbK4KELKoc8n7TyU45bU47ya8j0q4Hb6b9BJcjfU5MSlcNLTjpQT8r5MDOK5OuJRQ2QJ8BtKt2hDbP; '
              'H_BDCLCKID_SF_BFESS=tJIJ_ID2JCD3H48k-4QEbbQH-UnLqhT9WmOZ04n-ah05qnRKhnQ60hFbeb3fLf5-W20jbUjm3UTKsq76Wh35K5tTQP6rLf5U-I34KKJxbpbG8J5IytKaQMu3hUJiB5JMBan7_IJIXKohJh7FM4tW3J0ZyxomtfQxtNRJ0DnjtpChbC_Gj58aj5QLeU5eetjK2CntsJOOaCvVDCnOy4oWK441DpbQLtRDLa4D2DnxbMQPeqvoDh3G3M04K4o9-hvT-54e2p3FBUQPSPJcQft20b0v0tc-KfcatmnuLn7jWhk5ep72y582QlRX5q79atTMfNTJ-qcH0KQpsIJM5-DWbT8IjHCDJ5kDtJuHVbQqatTEKRopMtOhq4tehHRn0x79WDTOQJ7TthAaO-Ql0p0aX6kr5njlybb-3bva-pbw-q5GqlcD3MQNqfIuBnjlKpb03mkjbPbbt66fstKz0T5cXP4syP4eKMRnWnPjKfA-b4ncjRcTehoM3xI8LNj405OTbIFO0KJzJCFahIPlD6KKePDyqx5Ka43tHD7yWCvpWj6cOR59K4nnDURLyhjDQhO3bRbTLMQIX-3qsI3P3MOZKxLg5n7Tbb8eBgvZ2UQI5U3Esq0x0bO5DDuOQq_LQ5j9BKOMahkb5h7xOK-xQlPK5JkgMx6MqpQJQeQ-5KQN3KJmfbL9bT3YjjTLDNADJjDHfKresJoq2RbhKROvhjRi34PgyxoObtRxt28DaPQhLbrZsInM5pjPBn-8QNJaLU3k-eT9LMnx--t58h3_XhjPbh-tQttjQn3et4jbK4KELKoc8n7TyU45bU47ya8j0q4Hb6b9BJcjfU5MSlcNLTjpQT8r5MDOK5OuJRQ2QJ8BtKt2hDbP; ab_sr=1.0.1_ZTNmNDYwZGZlOTUxYmVmMDAwOWU4ZTUzNTE2NTUyMzU5MmRjY2U3YjIyOTIxODg1NmQxZDFmMmJlMDcxOThiZDViMDAwMmU0YjUyNDhlNTBmODQwOGE2YWNjMDJkMGM0NTg3ZmFiMTMyYTY5ZGU2YzY4NDBiYzFlMzIzMDM1Mjk1YmNhM2ExYjkyMTk1ZjkwNTFkZjQyMzFlNTVkYTAwZA==; RT="z=1&dm=baidu.com&si=61d28180-85fa-4129-be18-8401086643c5&ss=lp6wittc&sl=0&tt=0&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf"; Hm_lpvt_50e85ccdd6c1e538eb1290bc92327926=1700484446',
    'Host': 'talent.baidu.com',
    'Origin': 'https://talent.baidu.com',
    'Referer': 'https://talent.baidu.com/jobs/social-list?search=',
    'Sec-Ch-Ua': '"Microsoft Edge";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
    'Sec-Ch-Ua-Mobile': '?0',
    'Sec-Ch-Ua-Platform': 'Windows',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'same-origin',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0'
}

# Endpoint for Baidu's social-recruitment job-list API.
url = 'https://talent.baidu.com/httservice/getPostListNew'

# Base form payload for the first page; pages are fetched 20 rows at a time.
form_data = {
    'recruitType': 'SOCIAL',
    'pageSize': 20,
    'keyWord': None,
    'curPage': 1,
    'projectType': None,
}
# Accumulator for scraped job rows; filled by push_info(), written by
# save_info_data_to_csv().
info_data = []


def req_data():
    """Fetch every page of the Baidu social-recruitment job list.

    Posts to `url` repeatedly, 20 rows per page, and feeds each page's
    job list to push_info(), which accumulates rows into the global
    info_data. Sleeps 1 second between paginated requests to be polite.

    Fixes over the previous version:
    - The first request is no longer duplicated outside the loop.
    - Page count uses ceiling division instead of `int(total / 20) + 2`,
      which issued one extra empty-page request whenever `total` was a
      multiple of the page size.
    - Uses response.json() instead of manual decode + json.loads.
    """
    page = 1
    total_pages = 1  # revised after the first response reports 'total'
    while page <= total_pages:
        if page > 1:
            sleep(1)  # throttle between paginated requests
        payload = dict(form_data, curPage=page)
        response = requests.post(url, headers=headers, data=payload)
        body = response.json().get('data', {}) or {}
        if page == 1:
            total = int(body.get('total', 0))
            # Ceiling division: number of 20-row pages, at least 1.
            total_pages = max(1, (total + 19) // 20)
        push_info(body.get('list', []) or [])
        page += 1


def push_info(data_list):
    """Append one row per job posting in *data_list* to the global info_data.

    Each row holds, in order: education, name, postType, publishDate,
    updateDate, recruitNum, serviceCondition, workContent, workPlace,
    projectType, favoriteFlag, hotFlag. Missing keys default to ''.
    """
    fields = ('education', 'name', 'postType', 'publishDate', 'updateDate',
              'recruitNum', 'serviceCondition', 'workContent', 'workPlace',
              'projectType', 'favoriteFlag', 'hotFlag')
    for record in data_list:
        info_data.append([record.get(key, '') for key in fields])


def save_info_data_to_csv():
    """Write the rows collected in info_data to ../static/data/job_info.csv.

    Uses the utf_8_sig encoding (UTF-8 with BOM) so Excel displays the
    Chinese column headers correctly.
    """
    columns = ['学历', '职位名称', '职位类型', '发布时间', '更新时间', '招聘人数', '服务条件',
               '工作内容', '工作地点', '项目类型', '是否收藏', '是否热门']
    frame = pd.DataFrame(info_data, columns=columns)
    frame.to_csv('../static/data/job_info.csv', encoding='utf_8_sig', index=False)


if __name__ == '__main__':
    # Crawl every page of the job list, then persist the accumulated
    # rows to CSV.
    req_data()
    save_info_data_to_csv()
