from modules.storage.file_store import FileStore
from scrapy_main.config.main import ConfigMain
from scrapy_main.scrapers.ins_scraper import InsScraper
from scrapy_main.scrapers.otokake_scraper import OtokakeScraper
from scrapy_main.scrapers.pixiv_scraper import PixivScraper
from scrapy_main.scrapers.tingclass_scraper import TingclassScraper
from scrapy_main.scrapers.xlust_scraper import XlustScraper
from utils.os_main import get_abs_file_path

# Dictionary-based dispatch: scraper name -> implementation class + default
# JSON config path. Every config lives under scrapy_main/scripts/<name>.json.
_SCRIPT_DIR = 'scrapy_main/scripts'
_SCRAPER_CLASSES = (
    ('xlust', XlustScraper),
    ('tingclass', TingclassScraper),
    ('otokake', OtokakeScraper),
    ('instagram', InsScraper),
    ('pixiv', PixivScraper),
)
SCRAPER_MAPPING = {
    name: {'class': cls, 'config_path': f'{_SCRIPT_DIR}/{name}.json'}
    for name, cls in _SCRAPER_CLASSES
}


class ScheduleMain:
    """Looks up a scraper by name in SCRAPER_MAPPING and runs it.

    When runtime ``params`` are supplied, they can additionally be persisted:
    database settings through ConfigMain, and the remaining (password-stripped)
    parameters back to the scraper's JSON script file.
    """

    def __init__(self, scraper_name):
        # Must match a key in SCRAPER_MAPPING (e.g. 'instagram').
        self.scraper_name = scraper_name
        self.scraper = None

    def run(self, params=None):
        """Instantiate and start the configured scraper.

        Args:
            params: Optional dict of runtime parameters. When provided, it is
                passed to the scraper constructor; if its
                'enable_save_database' flag is truthy the database config is
                saved via ConfigMain (with 'password' removed first), and the
                params (minus 'database_config_param') are written back to the
                scraper's JSON script file. When omitted, the scraper is built
                from its bundled JSON config instead.

        Returns:
            Whatever the scraper's start_scraper() returns, or a
            ``{'msg': ..., 'code': 500}`` dict for an unknown scraper name.
        """
        # Guard clause keeps the original unknown-name contract.
        if self.scraper_name not in SCRAPER_MAPPING:
            return {
                'msg': f"Unknown scraper: {self.scraper_name}",
                'code': 500
            }

        scraper_info = SCRAPER_MAPPING[self.scraper_name]
        scraper_class = scraper_info['class']
        if params:
            self.scraper = scraper_class(params=params)
        else:
            # BUG FIX: was `elif scraper_info['config_path']`, which could
            # leave self.scraper as None and crash at start_scraper().
            self.scraper = scraper_class(json_file_path=scraper_info['config_path'])

        # BUG FIX: the original indexed/mutated `params` unconditionally, so
        # calling run() with no params (as __main__ does) raised
        # TypeError: 'NoneType' object is not subscriptable.
        if params:
            # .get() instead of [] so a missing flag means "don't save".
            if params.get('enable_save_database'):
                # 创建 ConfigMain 实例 -> create the config store.
                config = ConfigMain()
                # Never persist the plaintext password.
                params['database_config_param'].pop('password', None)

                # Store the database settings under the configured key.
                config.set_config_value_by_key(
                    params['database_config_name'],
                    params['database_config_param'],
                )
                config.save_config()

            # Persist the updated (password-stripped) params back to the
            # scraper's script file.
            script_location = get_abs_file_path(
                f"scrapy_main/scripts/{self.scraper_name}.json"
            )
            file = FileStore(script_location, write_file_mode='w')
            params.pop('database_config_param', None)
            file.write(params)

        return self.scraper.start_scraper()


if __name__ == '__main__':
    # Manual entry point: run the Instagram scraper with its default
    # on-disk JSON config (no runtime params).
    instagram_schedule = ScheduleMain('instagram')
    instagram_schedule.run()
