# -*- coding: utf-8 -*-
# @Time    : 2024/1/18 20:02
# @Author  : micah
# @File    : 7.使用爬虫获取阿里招聘数据并存储到mysql中.py
# @Software: PyCharm


import pymysql
import requests


class ALiWork:
    """Scrape Alibaba (taotian) off-campus job postings and store them in MySQL.

    Workflow: create_table() -> get_work_info() (generator of raw pages)
    -> parse_work_info() (field extraction) -> save_work_info() (insert row).
    """

    def __init__(self):
        # charset='utf8mb4' so Chinese job titles/descriptions round-trip
        # correctly instead of relying on the server's default charset.
        self.db = pymysql.connect(host='localhost', port=3306, user='root',
                                  password='123456', db='py_spider',
                                  charset='utf8mb4')
        self.cursor = self.db.cursor()

        # NOTE(review): the _csrf token and Cookie are session-bound and will
        # expire — refresh them from the browser when requests start failing.
        self.api_url = 'https://talent.taotian.com/position/search?_csrf=d7d658b2-5f1c-422d-b03b-28d52100ba1e'
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Cookie': 'XSRF-TOKEN=d7d658b2-5f1c-422d-b03b-28d52100ba1e; prefered-lang=zh; SESSION=N0FCMDlEOUMzNjYwNDdBMzlDMjEzQjZEMjlBMzRGRDg=; cna=x6YdHl3ubjYCAa8A4Yx3AjPG; xlly_s=1; isg=BHp6kA8-y1yzvEcwp4zmrNsJy6acK_4FUPjYK4Rx7o09dxuxbb_EFxDGxwOrZ3ad',
            'Referer': 'https://talent.taotian.com/off-campus/position-list?lang=zh'
        }

    # Close the database connection on garbage collection.
    def __del__(self):
        # Guard: __init__ may have raised before cursor/db were assigned,
        # and close() itself can fail during interpreter shutdown.
        try:
            self.cursor.close()
            self.db.close()
        except Exception:
            pass

    # Fetch data method
    def get_work_info(self):
        """Yield one page of raw posting dicts per iteration (pages 1-10)."""
        for page in range(1, 11):
            json_data = {"channel": "group_official_site", "language": "zh", "batchId": "", "categories": "",
                         "deptCodes": [], "key": "", "pageIndex": page, "pageSize": 10, "regions": "",
                         "subCategories": "",
                         "shareType": "", "shareId": "", "myReferralShareCode": ""}

            # timeout so a stalled connection cannot hang the scraper forever
            response = requests.post(self.api_url, headers=self.headers,
                                     json=json_data, timeout=10).json()
            yield response['content']['datas']

    # Clean/extract data method
    def parse_work_info(self, response_generator):
        """Extract the stored fields from each raw posting and insert it.

        :param response_generator: iterable of lists of raw posting dicts,
                                   as produced by get_work_info().
        """
        for work_info_list in response_generator:
            for work_info in work_info_list:
                item = dict()
                # Some postings have an empty category list — store a placeholder.
                item['categories'] = work_info['categories'][0] if work_info['categories'] else '空'
                item['work_name'] = work_info['name']
                item['description'] = work_info['description']

                # Pass None for id so MySQL's auto_increment assigns the key.
                self.save_work_info(None, item['categories'], item['work_name'], item['description'])

    # Create table method
    def create_table(self):
        """Create the ali_work table if it does not already exist."""
        sql = """
            create table if not exists ali_work(
                id int primary key auto_increment,
                categories varchar(50),
                work_name varchar(50),
                description text
            );
        """

        try:
            self.cursor.execute(sql)
            print('表创建成功...')
        except Exception as e:
            print('表创建失败:', e)

    # Save method
    def save_work_info(self, *args):
        """Insert one job posting row and commit.

        :param args: (id, categories, work_name, description); id may be None
                     to let auto_increment assign it.
        """
        # BUGFIX: the original statement used an explicit *empty* column list
        # ("insert into ali_work() values (...)"), which MySQL rejects with a
        # column-count mismatch (error 1136) — every insert silently failed.
        sql = """
            insert into ali_work (id, categories, work_name, description)
            values (%s, %s, %s, %s);
        """

        try:
            self.cursor.execute(sql, args)
            # pymysql uses transactions; writes must be committed explicitly.
            self.db.commit()
            print('数据插入成功...')
        except Exception as e:
            print('数据插入失败:', e)
            self.db.rollback()

    # Entry point
    def main(self):
        """Create the table, then fetch, parse, and store all pages."""
        self.create_table()
        ali_work_generator = self.get_work_info()
        self.parse_work_info(ali_work_generator)


if __name__ == '__main__':
    # Run the full scrape-and-store pipeline when executed as a script.
    ALiWork().main()
