# -*- coding: utf-8 -*-
import sys

from app.dify.judge_worker import JudgeWorker
from app.myttl.ttl_worker import TtlWorker
from app.pacong.baidu_crawler import BaiduHotListFetcher
from app.pacong.ithome_crawler import ItHomeHotListFetcher
from app.pacong.toutiao_crawler import ToutiaoHotListFetcher
from app.pacong.weibo_crawler import WeiboHotListFetcher

# Import-time debug aid: show where Python is resolving modules from.
print(f"Module search path:{sys.path}")
from time import sleep
import asyncio
from app.pacong.zhihu_hot_crawler import ZhihuHotListFetcher
from app.pacong.bilibili_hot_search_crawler import BilibiliHotSearchCrawler
from app.pacong.hacker_news_crawler import HackerNewsCrawler
from app.pacong.tieba_hot_crawler import TiebaHotTopicCrawler
from app.pacong.cankaoxiaoxi_crawler import CankaoxiaoxiCrawler
from app.pacong.xueqiu_hotstock_crawler import XueqiuHotStockCrawler
from app.pacong.douyin_hot_crawler import DouyinHotSearchCrawler
from app.pacong.fastbull_crawler import FastBullExpressCrawler, FastBullNewsCrawler
from app.pacong.gelonghui_crawler import GelonghuiCrawler
from app.pacong.thepaper_crawler import ThePaperCrawler
from app.pacong.v2ex_crawler import V2exCrawler
from app.pacong.kr36_crawler import Kr36HotListFetcher

def test_celery(word: str) -> str:
    for i in range(1, 11):
        sleep(1)
    return f"test task return {word}"

def run_baidu_crawler() -> str:
    """Fetch and persist the Baidu hot-search list; return a status message."""
    try:
        BaiduHotListFetcher().fetch_and_save()
    except Exception as e:
        return f"百度热搜爬虫运行失败: {e}"
    return "百度热搜数据抓取完成"

def run_weibo_crawler() -> str:
    """Fetch and persist the Weibo hot-search list; return a status message."""
    try:
        WeiboHotListFetcher().fetch_and_save()
    except Exception as e:
        return f"微博热搜爬虫运行失败: {e}"
    return "微博热搜数据抓取完成"

def run_toutiao_crawler() -> str:
    """Fetch and persist the Toutiao hot list; return a status message."""
    try:
        ToutiaoHotListFetcher().fetch_and_save()
    except Exception as e:
        return f"头条热榜爬虫运行失败: {e}"
    return "头条热榜数据抓取完成"

def run_bilibili_crawler() -> str:
    """Fetch and persist the Bilibili hot-search list; return a status message."""
    try:
        BilibiliHotSearchCrawler().fetch_and_save()
    except Exception as e:
        return f"B站热搜爬虫运行失败: {e}"
    return "B站热搜数据抓取完成"

def run_hackernews_crawler() -> str:
    """Fetch and persist the Hacker News front page; return a status message."""
    try:
        HackerNewsCrawler().fetch_and_save()
    except Exception as e:
        return f"Hacker News爬虫运行失败: {e}"
    return "Hacker News数据抓取完成"

def run_tieba_crawler() -> str:
    """Fetch and persist Baidu Tieba hot topics; return a status message."""
    try:
        TiebaHotTopicCrawler().fetch_and_save()
    except Exception as e:
        return f"贴吧热门话题爬虫运行失败: {e}"
    return "贴吧热门话题数据抓取完成"

def run_cankaoxiaoxi_crawler() -> str:
    """Fetch and persist Cankaoxiaoxi (Reference News) items; return a status message."""
    try:
        CankaoxiaoxiCrawler().fetch_and_save()
    except Exception as e:
        return f"参考消息爬虫运行失败: {e}"
    return "参考消息数据抓取完成"

def run_xueqiu_crawler() -> str:
    """Fetch and persist the Xueqiu hot-stock list; return a status message."""
    try:
        XueqiuHotStockCrawler().fetch_and_save()
    except Exception as e:
        return f"雪球热门股票爬虫运行失败: {e}"
    return "雪球热门股票数据抓取完成"

def run_douyin_crawler() -> str:
    """Fetch and persist the Douyin hot-search list; return a status message."""
    try:
        DouyinHotSearchCrawler().fetch_and_save()
    except Exception as e:
        return f"抖音热搜爬虫运行失败: {e}"
    return "抖音热搜数据抓取完成"

def run_fastbull_express_crawler() -> str:
    """Fetch FastBull express (flash-news) items; return a count/status message."""
    try:
        # len() stays inside the try so a non-sized return is reported, not raised.
        count = len(FastBullExpressCrawler().fetch_express())
    except Exception as e:
        return f"FastBull快讯爬虫运行失败: {e}"
    return f"成功获取 {count} 条FastBull快讯数据"

def run_fastbull_news_crawler() -> str:
    """Fetch FastBull news items; return a count/status message."""
    try:
        # len() stays inside the try so a non-sized return is reported, not raised.
        count = len(FastBullNewsCrawler().fetch_news())
    except Exception as e:
        return f"FastBull新闻爬虫运行失败: {e}"
    return f"成功获取 {count} 条FastBull新闻数据"

def run_gelonghui_crawler() -> str:
    """Fetch and persist Gelonghui items; return a status message."""
    try:
        GelonghuiCrawler().fetch_and_save()
    except Exception as e:
        return f"格隆汇爬虫运行失败: {e}"
    return "格隆汇数据抓取完成"

def run_thepaper_crawler() -> str:
    """Fetch and persist The Paper (Pengpai) news; return a status message."""
    try:
        ThePaperCrawler().fetch_and_save()
    except Exception as e:
        return f"澎湃新闻爬虫运行失败: {e}"
    return "澎湃新闻数据抓取完成"

def run_v2ex_crawler() -> str:
    """Fetch and persist V2EX items; return a status message."""
    try:
        V2exCrawler().fetch_and_save()
    except Exception as e:
        return f"V2EX爬虫运行失败: {e}"
    return "V2EX数据抓取完成"

def run_ithome_crawler() -> str:
    """Fetch and persist the IT Home hot list; return a status message."""
    try:
        ItHomeHotListFetcher().fetch_and_save()
    except Exception as e:
        return f"IT之家爬虫运行失败: {e}"
    return "IT之家数据抓取完成"

def run_zhihu_crawler() -> str:
    """Fetch and persist the Zhihu hot list; return a status message."""
    try:
        ZhihuHotListFetcher().fetch_and_save()
    except Exception as e:
        return f"知乎热榜爬虫运行失败: {e}"
    return "知乎热榜数据抓取完成"

def run_judge_worker() -> str:
    """Run the content-judging worker; return a status message."""
    try:
        JudgeWorker().run()
        # TODO: implement the content-judging logic (note carried over from original)
    except Exception as e:
        return f"内容评判任务执行失败: {e}"
    return "内容评判任务执行完成"

def run_kr36_crawler() -> str:
    """Fetch and persist the 36Kr hot list; return a status message."""
    try:
        Kr36HotListFetcher().fetch_and_save()
    except Exception as e:
        return f"36氪热榜爬虫运行失败: {e}"
    return "36氪热榜数据抓取完成"


def run_ttl_worker() -> str:
    """Run the TTL worker; return a status message."""
    try:
        TtlWorker().run()
    except Exception as e:
        return f"TTLWorker执行失败: {e}"
    return "TTLWorker执行完成"
