import os
import sys
from pathlib import Path

from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

sys.path.append(Path(__file__).parent.parent.parent.parent.as_posix())
from apps.tax_illegal.tax_illegal.clean.base import run_clean
from components.settings.private.net_robot_mysql_settings import *


class CrawlerAll:
    """Run every spider registered in the Scrapy project, then clean results."""

    def run(self) -> None:
        """Export MySQL credentials, crawl all spiders, and run the cleanup.

        The MYSQL_* values come from the wildcard settings import at module
        level; they are published through environment variables so the
        spiders' pipelines can pick them up. Blocks until all crawls finish,
        then invokes ``run_clean()``.
        """
        settings = get_project_settings()
        # os.environ values must be str — coerce defensively in case the
        # settings module defines e.g. a numeric port.
        os.environ["MYSQL_IP"] = str(MYSQL_IP)
        os.environ["MYSQL_PORT"] = str(MYSQL_PORT)
        os.environ["MYSQL_DB"] = str(MYSQL_DB)
        os.environ["MYSQL_USER_NAME"] = str(MYSQL_USER_NAME)
        os.environ["MYSQL_USER_PASS"] = str(MYSQL_USER_PASS)
        process = CrawlerProcess(settings)
        # Schedule every spider known to the project's spider loader before
        # starting the (blocking) reactor.
        for spider_name in process.spider_loader.list():
            process.crawl(spider_name)
        process.start()  # blocks until all scheduled crawls complete
        run_clean()


if __name__ == "__main__":
    # Script entry point: kick off the full crawl-and-clean pipeline.
    crawler = CrawlerAll()
    crawler.run()
