import threading
import time
from scrapy import cmdline
from baiduNovel.redisTomongo import main as rtom_main  # 导入你的Redis到Mongo同步函数
from baiduNovel.writeWord import write_word

def run_rtom():
    """Entry point for the sync worker thread.

    Starts the Redis-to-MongoDB sync routine and swallows any exception it
    raises, reporting it on stdout, so a failure in the sync code cannot
    kill the thread with an unhandled traceback.
    """
    print("启动Redis到MongoDB同步程序...")
    try:
        rtom_main()  # delegate to the project's sync routine
    except Exception as e:
        print(f"同步程序出错: {str(e)}")


if __name__ == "__main__":
    # Run the Redis->MongoDB sync in a daemon thread so it is killed with
    # the main process and cannot keep the interpreter alive after the
    # crawl finishes.
    rtom_thread = threading.Thread(target=run_rtom, daemon=True)
    rtom_thread.start()

    # Small head start so the sync loop is already running before the
    # spider starts producing data.
    time.sleep(2)

    # Run the crawler in the main thread.
    try:
        print("启动爬虫程序...")
        # Pass argv as an explicit list: 'cmd'.split(' ') is fragile (a
        # repeated space yields empty tokens) and hides the real command.
        # NOTE: scrapy's cmdline.execute() terminates via sys.exit(), i.e.
        # it raises SystemExit — which is NOT an Exception subclass — so
        # the except clause below only fires for genuine startup errors.
        # Cleanup is still guaranteed by the finally block on either path.
        cmdline.execute(["scrapy", "crawl", "baiduFanRen"])
    except Exception as e:
        print(f"爬虫程序出错: {str(e)}")
    finally:
        # Crawl is over: give the sync thread time to drain whatever is
        # still queued in Redis before the daemon thread dies on exit.
        print("爬虫已结束，等待同步程序处理剩余数据...")
        time.sleep(10)
        write_word()
        print("程序退出")
