import os

from Analysis import analyze_data
from Clear import clean_data
from Crawl import crawl_news
from FastAPI import app
from Login import USER_FILE, login, register
from MongoDB import save_to_mongodb
from Visualization import dynamic_visualization


def main():
    """Run the full news pipeline: authenticate, crawl, clean, analyze,
    persist to MongoDB, generate visualizations, then serve the API.

    Side effects: reads/writes the user file, prints progress to stdout,
    writes wordcloud.png / timeline.html, and finally blocks on the
    uvicorn server at port 8000.
    """
    # User system: force registration on first run (no user store exists yet).
    if not os.path.exists(USER_FILE):
        print("首次使用需要注册")
        register()

    # Guard clause: give explicit feedback on auth failure instead of
    # exiting silently, and keep the pipeline below un-nested.
    if not login():
        print("登录失败，程序退出")
        return

    # Crawl the raw news data.
    news = crawl_news()
    if not news:
        print("未获取到新闻数据")
        return

    # Data cleaning.
    cleaned_news = clean_data(news)
    print(f"获取到{len(cleaned_news)}条有效新闻")

    # Basic analysis: (word, count) pairs — top-20 per the message below.
    top_words = analyze_data(cleaned_news)
    print("\n高频词汇TOP20:")
    for word, count in top_words:
        print(f"{word}: {count}次")

    # Extended features: persistence and dynamic visualization.
    save_to_mongodb(cleaned_news)  # MongoDB storage
    dynamic_visualization(cleaned_news)  # dynamic visualization

    print("\n可视化结果已保存: wordcloud.png 和 timeline.html")
    print("API服务已启动: 访问 http://localhost:8000/news 获取数据")

    # Start the API server last — uvicorn.run() blocks until shutdown.
    # Imported lazily so the rest of the pipeline works even if uvicorn
    # is not installed.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)


# Run the pipeline only when executed as a script, not when imported.
if __name__ == "__main__":
    main()