import requests
import time
import json
import pymysql

# Issue the request and fetch one page of the job-list AJAX response.
def get_response(page, timeout=10):
    """Fetch one page of job listings from the NCSS job-list endpoint.

    Args:
        page: value passed to the API's ``offset`` query parameter.
        timeout: seconds to wait for the server before giving up
            (new keyword with a default, so existing callers are unaffected).

    Returns:
        The response body text on HTTP 200; otherwise the numeric status
        code. NOTE(review): returning an int on failure will crash
        ``json.loads`` in the caller — kept only for interface
        compatibility, so callers should check ``isinstance(result, str)``.
    """
    url = f'https://www.ncss.cn/student/jobs/jobslist/ajax/?jobType=&areaCode=&jobName=&monthPay=&industrySectors=&property=&categoryCode=&memberLevel=&recruitType=&offset={page}&limit=10&keyUnits=&degreeCode=&sourcesName=0&sourcesType=&_=1740565578775'
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36',
    }
    # Without a timeout, requests can block forever on a stalled connection.
    response = requests.get(url=url, headers=headers, timeout=timeout)
    if response.status_code == 200:
        return response.text
    else:
        return response.status_code

if __name__ == '__main__':
    extracted_data = []
    print("***********************************正在爬取***********************************")
    # Each page returns up to 10 jobs; cap the total at 500 records.
    for page in range(1, 50):
        if len(extracted_data) >= 500:
            break
        json_string = get_response(page)
        # get_response returns an int status code on failure; skip such
        # pages instead of crashing inside json.loads.
        if not isinstance(json_string, str):
            continue
        data = json.loads(json_string)
        if "data" in data and isinstance(data["data"], dict):
            job_list = data["data"].get("list", [])
            for job in job_list:
                if isinstance(job, dict):
                    extracted_data.append({
                        "jobName": job["jobName"],
                        "recName": job["recName"],
                        "lowMonthPay": job["lowMonthPay"],
                        "highMonthPay": job["highMonthPay"],
                        "areaCodeName": job["areaCodeName"],
                        "recTags": job["recTags"]
                    })
                    if len(extracted_data) >= 500:
                        break
        time.sleep(0.1)  # brief pause between pages to avoid hammering the server
    print("***********************************爬取完毕***********************************\n")

    # Connect to MySQL; the `jobs` table in database `jobdb` must already exist.
    # NOTE(review): charset='utf8' is MySQL's 3-byte utf8mb3 — consider
    # utf8mb4 if job text may contain emoji/supplementary characters.
    conn = pymysql.connect(host='localhost', user='root', password='123456', database='jobdb', charset='utf8')
    try:
        # `with` guarantees the cursor is closed even if an insert fails.
        with conn.cursor() as cursor:
            sql = 'insert into jobs(job_name, company_name, job_salary, job_address, job_label) values(%s, %s, %s, %s, %s)'
            # Build all parameter tuples up front, averaging the monthly
            # pay range into a single salary figure.
            rows = [
                (
                    job["jobName"],                                   # job title
                    job["recName"],                                   # company name
                    (job["lowMonthPay"] + job["highMonthPay"]) / 2,   # average monthly pay
                    job["areaCodeName"],                              # location
                    job["recTags"],                                   # company tags
                )
                for job in extracted_data
            ]
            # One batched call instead of a round trip per row.
            cursor.executemany(sql, rows)
        # Commit only after every row was staged successfully.
        conn.commit()
    finally:
        # Always release the connection, even when an insert raised.
        conn.close()
    print("数据插入成功！")