# -*- coding: utf-8 -*-
import os
import sys
from twisted.internet import reactor, defer
from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
from squirrel_core.commons.utils.tools import set_log_env


def start_scrapy_job(obj_class, *args, **kwargs):
    """Run a single Scrapy spider class to completion inside a Twisted reactor.

    Blocks the calling thread until the crawl finishes and the reactor is
    stopped (``reactor.run()`` returns).

    Args:
        obj_class: The spider class handed to ``CrawlerRunner.crawl``.
        *args: Extra positional arguments forwarded to the spider.
        **kwargs: Recognised keys:
            queue: Optional list of queues passed through to the spider.
            config: Dict carrying a ``job_config`` sub-dict; it must contain
                ``spider_name`` and may carry ``serialNumber``,
                ``name_first`` and ``name_second`` for log setup.
            logger: Optional logger used for error reporting; falls back to
                ``print`` when absent.

    Raises:
        KeyError: If ``config['job_config']['spider_name']`` is missing.

    Side effects:
        Sets the ``spider_name`` and ``SCRAPY_SETTINGS_MODULE`` environment
        variables and calls ``set_log_env``; may call ``sys.exit(1)`` if the
        reactor cannot be stopped cleanly.
    """
    queues = kwargs.get('queue', [])
    crawler_config = kwargs.get('config', {}).get('job_config', {})
    logger = kwargs.get('logger', None)
    _kwargs = {'config': crawler_config, 'queue': queues}

    def _report(message):
        # Prefer the caller-supplied logger; fall back to stdout so the
        # original print-based behaviour is preserved when no logger is given.
        if logger is not None:
            logger.error(message)
        else:
            print(message)

    job_id = crawler_config.get('serialNumber', '')
    # Point Scrapy at the project's settings module before loading settings.
    # crawler_config is the same dict as kwargs['config']['job_config'], so
    # index it directly instead of re-walking the nested kwargs chain.
    os.environ['spider_name'] = crawler_config['spider_name']
    os.environ['SCRAPY_SETTINGS_MODULE'] = 'squirrel_core.settings'
    set_log_env(crawler_config.get('name_first'), crawler_config.get('name_second'), job_id=job_id)

    settings = get_project_settings()
    runner = CrawlerRunner(settings)

    @defer.inlineCallbacks
    def crawl():
        try:
            yield runner.crawl(obj_class, *args, **_kwargs)
        except Exception as e:
            _report(f"爬虫运行失败: {e}")
        finally:
            # Always stop the reactor so reactor.run() below can return;
            # cancel pending delayed calls first to avoid unclean-reactor
            # warnings from stray timers.
            try:
                if reactor.running:
                    for delayed in reactor.getDelayedCalls():
                        delayed.cancel()
                    reactor.stop()
            except Exception as stop_error:
                _report(f"停止 reactor 时出错: {stop_error}")
                sys.exit(1)

    def on_failure(failure):
        # Safety net for failures that escape crawl(), e.g. an error raised
        # while stopping the reactor in its finally block.
        _report(f"爬虫执行失败: {failure}")
        try:
            if reactor.running:
                reactor.stop()
        except Exception:  # narrowed from a bare except: never mask SystemExit
            sys.exit(1)

    # @defer.inlineCallbacks already returns a Deferred, so the previous
    # defer.ensureDeferred() wrapper was redundant.
    d = crawl()
    d.addErrback(on_failure)
    reactor.run()
