import requests
import pymysql
import time
import threading
from queue import Queue

class TxzpSpider:
    """Multi-threaded crawler for Tencent career postings (keyword "java").

    Pipeline (each stage runs in its own daemon thread(s)):
        page numbers -> url_queue -> 5 fetch workers -> resp_queue
        -> parse worker -> save_queue -> save worker -> MySQL.

    Fixes vs. the original:
    - parse_data now feeds save_queue instead of inserting directly,
      so save_data / save_queue are no longer dead code.
    - requests.get has a timeout so a hung request cannot stall
      url_queue.join() forever.
    - The table is created once up front, not once per response batch.
    """

    # API endpoint; {page} is substituted per request.
    url = 'https://careers.tencent.com/tencentcareer/api/post/Query?timestamp=1728007701507&countryId=&cityId=&bgIds=&productId=&categoryId=&parentCategoryId=&attrId=&keyword=java&pageIndex={page}&pageSize=10&language=zh-cn&area=cn'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36'
    }

    def __init__(self):
        # Connection settings. A fresh connection is opened per DB
        # operation because a pymysql connection is not thread-safe.
        self.db_config = {
            'host': 'localhost',
            'port': 3306,
            'user': 'root',
            'passwd': '888888',
            'db': 'test'
        }
        self.url_queue = Queue()   # page numbers waiting to be fetched
        self.resp_queue = Queue()  # raw post lists waiting to be parsed
        self.save_queue = Queue()  # parsed row tuples waiting to be inserted

    def get_db_connection(self):
        """Open and return a new MySQL connection from self.db_config."""
        return pymysql.connect(**self.db_config)

    def _ensure_table(self):
        """Create the target table once, before any worker thread runs."""
        conn = self.get_db_connection()
        cursor = conn.cursor()
        try:
            cursor.execute("""
                create table if not exists tx_works3(
                    id int primary key auto_increment,
                    work_name varchar(100) not null,
                    country varchar(50),
                    city varchar(50),
                    work_desc text not null
                );
            """)
            conn.commit()
        finally:
            cursor.close()
            conn.close()

    def get_url(self):
        """Producer: enqueue page numbers 1..17 for the fetch workers."""
        for page in range(1, 18):
            self.url_queue.put(page)

    def get_resp(self):
        """Fetch worker: request one page per queue item, enqueue its posts.

        Runs forever as a daemon thread; task_done() is called even on
        failure so url_queue.join() can complete.
        """
        while True:
            page = self.url_queue.get()
            url = self.url.format(page=page)
            try:
                # timeout keeps a dead request from blocking this worker
                # (and the final join) indefinitely.
                resp = requests.get(url, headers=self.headers, timeout=10).json()
                self.resp_queue.put(resp['Data']['Posts'])
            except Exception as e:
                print(f"请求或解析出错: {e}")
            finally:
                self.url_queue.task_done()

    def parse_data(self):
        """Parse worker: extract the fields of each post and enqueue a row
        tuple for the save thread (instead of inserting directly, which
        left save_data as dead code in the original)."""
        while True:
            json_resp = self.resp_queue.get()
            try:
                for w in json_resp:
                    self.save_queue.put((
                        w['RecruitPostName'],
                        w['CountryName'],
                        w['LocationName'],
                        w['Responsibility'],
                    ))
            except Exception as e:
                print(e, "解析数据失败")
            finally:
                self.resp_queue.task_done()

    def save_data(self):
        """Save worker: insert one parsed row per queue item into MySQL.

        Opens a short-lived connection per row; commits on success,
        rolls back and logs on failure.
        """
        while True:
            work_name, country, city, work_desc = self.save_queue.get()
            conn = self.get_db_connection()
            cursor = conn.cursor()
            try:
                cursor.execute("""
                    insert into tx_works3 (work_name, country, city, work_desc)
                    values (%s, %s, %s, %s);
                """, (work_name, country, city, work_desc))
                conn.commit()
            except Exception as e:
                conn.rollback()
                print(e, "插入数据失败")
            finally:
                cursor.close()
                conn.close()
            self.save_queue.task_done()

    def run(self):
        """Create the table, start all worker threads, and block until
        every queue has been fully processed."""
        self._ensure_table()  # table must exist before the save worker inserts

        thread_list = []

        t_url = threading.Thread(target=self.get_url)
        thread_list.append(t_url)

        for _ in range(5):
            t_get_info = threading.Thread(target=self.get_resp)
            thread_list.append(t_get_info)

        t_parse = threading.Thread(target=self.parse_data)
        thread_list.append(t_parse)

        t_save = threading.Thread(target=self.save_data)
        thread_list.append(t_save)

        # Daemon threads: they die with the main thread once the joins return.
        for t_obj in thread_list:
            t_obj.daemon = True
            t_obj.start()

        # Join in pipeline order: all fetches done, then all parses,
        # then all inserts.
        self.url_queue.join()
        self.resp_queue.join()
        self.save_queue.join()

if __name__ == '__main__':
    started = time.time()
    spider = TxzpSpider()
    spider.run()
    elapsed = time.time() - started
    print('使用时间:', elapsed)  # ~3.4s on the author's machine
    # The crawled rows match a single-threaded run; only the insertion
    # order differs, since worker threads race on the shared queues.
