from fastapi import FastAPI, HTTPException
import config
import db
from base.base_crawler import AbstractCrawler
from media_platform.bilibili import BilibiliCrawler
from media_platform.douyin import DouYinCrawler
from media_platform.kuaishou import KuaishouCrawler
from media_platform.tieba import TieBaCrawler
from media_platform.weibo import WeiboCrawler
from media_platform.xhs import XiaoHongShuCrawler
from media_platform.zhihu import ZhihuCrawler
from fastapi.middleware.cors import CORSMiddleware


class CrawlerFactory:
    """Factory that maps a short platform key to its crawler implementation."""

    # Registry of supported platforms. Keys are the short codes accepted by
    # the /start-crawler/ endpoint.
    CRAWLERS = {
        "xhs": XiaoHongShuCrawler,
        "dy": DouYinCrawler,
        "ks": KuaishouCrawler,
        "bili": BilibiliCrawler,
        "wb": WeiboCrawler,
        "tieba": TieBaCrawler,
        "zhihu": ZhihuCrawler
    }

    @staticmethod
    def create_crawler(platform: str) -> AbstractCrawler:
        """
        Instantiate the crawler registered for *platform*.

        Args:
            platform: Short platform code, e.g. "xhs" or "dy".

        Returns:
            A fresh crawler instance.

        Raises:
            ValueError: If *platform* is not a key in CRAWLERS. The message
                lists every supported platform (the old one named only four).
        """
        crawler_class = CrawlerFactory.CRAWLERS.get(platform)
        if not crawler_class:
            supported = ", ".join(sorted(CrawlerFactory.CRAWLERS))
            raise ValueError(
                f"Invalid media platform '{platform}'. Supported platforms: {supported}"
            )
        return crawler_class()

# FastAPI application instance; routes are registered on it below.
app = FastAPI()

# NOTE(review): allowing every origin together with credentials is the most
# permissive CORS setup possible — fine for local/single-user use, but the
# origin list should be pinned down before exposing this service publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins; replace with specific origins if needed.
    allow_credentials=True,
    allow_methods=["*"],  # Allow all HTTP methods (GET, POST, OPTIONS, etc.).
    allow_headers=["*"],  # Allow all request headers.
)
@app.post("/start-crawler/")
async def start_crawler(platform: str, keyWord: str, crawlerType: str):
    """
    Start the crawler for a given platform.

    Args:
        platform: The platform to crawl (e.g., "xhs", "dy").
        keyWord: Search keyword(s), stored into config.KEYWORDS.
        crawlerType: Crawler mode, stored into config.CRAWLER_TYPE.

    Returns:
        A dict with "msg" and "code" on success.

    Raises:
        HTTPException: 500 with the underlying error message on any failure
            (including an unknown platform, raised as ValueError by the factory).
    """
    # NOTE(review): mutating module-level config means concurrent requests
    # clobber each other's settings — acceptable only for single-user use.
    config.CRAWLER_TYPE = crawlerType
    config.KEYWORDS = keyWord
    config.PLATFORM = platform

    db_initialized = False
    try:
        # Initialize DB storage only when configured to persist to it.
        if config.SAVE_DATA_OPTION == "db":
            await db.init_db()
            db_initialized = True

        crawler = CrawlerFactory.create_crawler(config.PLATFORM)
        await crawler.start()

        return {
            "msg": "Crawler started successfully",
            "code": 200
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

    finally:
        # Always release the DB connection — the original only closed it on
        # the success path, leaking it whenever crawler.start() raised.
        if db_initialized:
            await db.close()

if __name__ == "__main__":
    # Launch the API with uvicorn when this module is run as a script.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8001)