# Bootstrap: these lines must run before any project imports so that the
# project root is on sys.path and Scrapy can locate its settings module.
import os
import sys
import logging
spider_root_dir = os.getenv("SPIDER_PRO_ROOT_DIR", "./")
sys.path.append(spider_root_dir)
os.environ["SCRAPY_SETTINGS_MODULE"] = "eolcrawl.settings"  # replace with your project's name
# Environment variable setting added in ros_main.py
os.environ["SCRAPY_DEBUG"] = "1"

import time

from eolcrawl.database.spider_db_models import (
        init_database_tables,
        close_connection,
    )
from eolcrawl.spiderutils.log_helper import get_mylogger

# NOTE(review): this rebinds the module-level name `logging` from the stdlib
# module to a project logger object, so every `logging.info(...)` call below
# is a call on that logger, not on the stdlib module.  Renaming it to
# `logger` would be clearer, but requires updating every call site at once.
logging = get_mylogger("main",level=logging.INFO)



#
def run1(spider_name):
    """Run a single spider through Scrapy's command-line entry point.

    Args:
        spider_name: Name of a spider registered in the Scrapy project.

    Note:
        ``scrapy.cmdline.execute()`` terminates the process via
        ``sys.exit()`` when the crawl finishes.  Without handling that,
        the "end scrapy crawl" log line below is unreachable, a caller
        looping over several spiders stops after the first one, and the
        ``close_spider()`` cleanup in ``__main__`` never runs.  We catch
        ``SystemExit`` so control returns to the caller.
    """
    from scrapy.cmdline import execute

    # Make sure the directory containing this script is importable.
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))
    logging.info(f"start scrapy crawl {spider_name},time:{time.time()}")
    try:
        execute(["scrapy", "crawl", spider_name])
    except SystemExit as exc:
        # execute() always exits via sys.exit(); swallow it so sequential
        # runs and the caller's cleanup can proceed, but surface failures.
        if exc.code not in (0, None):
            logging.warning(f"scrapy crawl {spider_name} exited with code {exc.code}")
    logging.info(f"end scrapy crawl {spider_name},time:{time.time()}")


def single_run():
    """Run the currently selected spiders, one after another.

    Other spider names available in this project (kept from the original
    author's notes): 'ros_source', 'ros_pkg_download', 'agiros_realse_code',
    'agiros_realse_pkg', 'agiros_realse_code_login', 'hn_login',
    'zbggdetail'.  Edit ``spider_items`` to change the selection.
    """
    spider_items = ['agiros_realse_code']

    for spider_name in spider_items:
        run1(spider_name)



def init_spider_env():
    """Load spider environment variables from the ``ros_env.env`` file.

    Values from the file take precedence over any variables already set
    in the process environment (``override=True``).
    """
    from dotenv import load_dotenv

    load_dotenv("ros_env.env", override=True)
    
def init_spider():
    """Prepare the spider runtime: load env vars, then set up DB tables.

    Order matters: the environment must be loaded before the database
    layer initializes, since connection settings may come from the env.
    """
    logging.info("init spider")
    init_spider_env()
    init_database_tables()


def close_spider():
    """Tear down the spider runtime by releasing the database connection."""
    logging.info("close spider")
    close_connection()


if __name__ == "__main__":
    init_spider()
    try:
        single_run()
    finally:
        # Always release the database connection, even if a crawl raises
        # or scrapy terminates the run via SystemExit.
        close_spider()