import os
import time
from configparser import ConfigParser
import sys
from pathlib import Path
# Add the project root directory to sys.path (required so `modules` can be imported)
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Import pipeline components from the local package
from modules import (
    setup_logger,
    DataCollector,
    DataCleaner,
    FeatureExtractor,
    DataAugmenter,
    DataStandardizer,
    save_raw_data_to_csv,
    save_to_csv
)

def main():
    """Run the public-opinion monitoring pipeline end to end.

    Stages: collect raw Weibo posts (with up to 3 retries), clean them,
    extract features, augment, standardize, and persist results to CSV.
    Configuration is read from ``config.ini`` located next to this script.
    Errors in any stage are logged; the function never raises to the caller.
    """
    # Initialize the logging system
    logger = setup_logger()
    logger.info("舆情监控系统启动")

    try:
        # Load configuration from the INI file next to this script,
        # passing an absolute Path so the cwd does not matter.
        config = ConfigParser()
        config_path = Path(__file__).parent / "config.ini"
        config.read(config_path, encoding='utf-8')

        # Initialize the core pipeline modules
        collector = DataCollector(cookie=config['weibo']['cookie'])
        cleaner = DataCleaner()
        feature_extractor = FeatureExtractor()
        augmenter = DataAugmenter()
        standardizer = DataStandardizer()

        # ================= Data collection =================
        logger.info("开始采集微博数据...")
        raw_posts = []
        for attempt in range(3):
            try:
                raw_posts = collector.crawl_weibo(
                    keyword=config['weibo']['keyword'],
                    pages=int(config['weibo']['pages'])
                )
                if raw_posts:
                    logger.info(f"成功采集{len(raw_posts)}条原始数据")
                    break
                logger.warning(f"第{attempt+1}次采集返回空数据")
                # Back off before retrying on an empty result too,
                # matching the exception path below.
                time.sleep(2)
            except Exception as e:
                logger.warning(f"采集失败: {str(e)}")
                time.sleep(2)

        if not raw_posts:
            logger.error("数据采集失败，系统退出")
            return

        # Persist the raw data.
        # NOTE(review): raw data goes through save_to_csv() while the
        # processed data at the end uses save_raw_data_to_csv() — the names
        # look swapped; confirm against the `modules` package before relying
        # on the output file layout.
        save_to_csv(raw_posts)
        logger.info(f"原始数据已保存，数据量: {len(raw_posts)}条")

        # ================= Data cleaning =================
        logger.info("开始数据清洗...")
        cleaned_posts = cleaner.clean(raw_posts)
        logger.info(f"清洗后数据量: {len(cleaned_posts)}条")

        # ================= Feature extraction =================
        logger.info("开始特征提取...")
        for i, post in enumerate(cleaned_posts):
            if i % 50 == 0:
                # Progress heartbeat every 50 posts
                logger.info(f"已处理 {i}/{len(cleaned_posts)} 条数据")
            feature_extractor.extract_features(post)

        # ================= Data augmentation =================
        logger.info("执行数据增强...")
        augmented_posts = augmenter.augment(cleaned_posts)
        logger.info(f"增强后数据量: {len(augmented_posts)}条")

        # ================= Data standardization =================
        logger.info("执行数据标准化...")
        standardized_posts = standardizer.fit_transform(augmented_posts)
        # Persist the fitted scaler so later runs can reuse the same scaling
        standardizer.save_scaler()

        # ================= Data storage =================
        logger.info("存储处理后的数据...")
        save_raw_data_to_csv(standardized_posts)
        logger.info(f"成功存储{len(standardized_posts)}条处理数据")
    except Exception as e:
        # Top-level boundary: log with traceback instead of crashing
        logger.error(f"系统发生严重错误: {str(e)}", exc_info=True)
    finally:
        logger.info("舆情监控系统运行结束")

# Script entry point: run the pipeline only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
    