#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2022/10/19 0:56
# @Author  : when
# @File    : page_producer.py
# @Description : 爬取页面的消费者
import time
import traceback

from loguru import logger
from sqlalchemy.orm import Session
from pymysql.err import OperationalError

from crawler.config.settings import PAGE_NUM
from crawler.orm_model.page import Page
from crawler.orm_model.page import PageStatusChoice
from crawler.website.boss.boss_parser import BossParser
from components.mysql_client.sqlalchemy_connector import SqlConnector


def run():
    """Consumer loop: fetch crawled pages in FINISH state, parse them, and
    persist the resulting status (RESOLVER_FINISH / RESOLVER_FAIL).

    Runs forever. On a lost MySQL connection (OperationalError) it rebuilds
    the session; any other exception is logged with its traceback and the
    loop continues.
    """
    db: Session
    db = SqlConnector()
    while True:
        try:
            # Fetch a bounded batch of pages awaiting parsing.
            page_list = db.query(Page).filter_by(status=PageStatusChoice.FINISH).limit(PAGE_NUM).all()
            db.commit()
            if not page_list:
                logger.info("未从数据库中获取到待解析的记录")
                time.sleep(20)
                continue
            page_ids = [obj.id for obj in page_list]
            logger.info(f"获取待解析记录，开始解析... page_ids={page_ids}")
            for page_obj in page_list:
                page_obj: Page
                boss_parser = BossParser(db, page_obj)
                ret = boss_parser.parser_job_card()
                # BUG FIX: the failure status used to be unconditionally
                # overwritten with RESOLVER_FINISH on the next line, so
                # failed parses were recorded as successes.
                if ret:
                    page_obj.status = PageStatusChoice.RESOLVER_FINISH
                else:
                    page_obj.status = PageStatusChoice.RESOLVER_FAIL
            db.commit()
            logger.info(f"解析结果写入数据库完成, page_ids={page_ids}")
        except OperationalError:
            # Connection dropped — rebuild the session and retry the batch.
            db = SqlConnector()
        except Exception as e:
            logger.error(f"主程序错误：{e}\n堆栈信息：{traceback.format_exc()}")


def test():
    """One-shot manual check: parse the page record with id=1 and log the result."""
    session: Session
    session = SqlConnector()
    page = session.query(Page).filter_by(id=1).first()
    logger.debug(f"page_obj={page}")
    parser = BossParser(session, page)
    result = parser.parser_job_card()
    logger.debug(f"ret={result}")


if __name__ == '__main__':
    run()
    # test()  # swap in for a one-shot debug run against page id=1

