import os
import json
import pickle
import logging
from pathlib import Path
from typing import Tuple, Set, Dict, Optional
import pandas as pd
from source.RFT.utils import logger, generate_scenario_id, load_details_json
from source.RFT.models import ScenarioMetadata
from source.data_processor.utils.experiment_data import ExperimentData

def load_from_cache(cache_path: str) -> Optional[ExperimentData]:
    """Load an ExperimentData object from a pickle cache file.

    Args:
        cache_path: Path to the pickle cache file.

    Returns:
        The deserialized ExperimentData, or None if loading fails for any
        reason (the failure is logged).
    """
    # NOTE(review): pickle.load can execute arbitrary code if the file is
    # untrusted — assumes cache files are produced by this pipeline only.
    try:
        with open(cache_path, 'rb') as cache_file:
            loaded = pickle.load(cache_file)
    except Exception as e:
        # Best-effort loader: any failure (missing file, truncated or
        # incompatible pickle) is logged and reported as None.
        logger.error(f"加载缓存失败: {cache_path}, 错误: {e}")
        return None
    return loaded

def load_anomalous_trace_ids(results_path: str) -> Set[str]:
    """Load anomalous trace IDs from a results.json file.

    Args:
        results_path: Path to the results.json file.

    Returns:
        Set of anomalous trace IDs; empty if the file is missing, unreadable,
        or malformed (the failure is logged).
    """
    anomalous_trace_ids: Set[str] = set()
    try:
        # Explicit encoding: the file is JSON (UTF-8); relying on the platform
        # default encoding can mis-decode it on e.g. Windows.
        with open(results_path, 'r', encoding='utf-8') as f:
            results_data = json.load(f)
        # Collect the trace_id of every anomalous trace across all windows.
        anomalous_trace_ids = {
            trace["trace_id"]
            for window in results_data.get("windows", [])
            for trace in window.get("anomalous_traces", [])
            if "trace_id" in trace
        }
    except Exception as e:
        # Best-effort: log and fall through to returning an empty set.
        logger.error(f"加载异常Trace ID失败: {results_path}, 错误: {e}")

    return anomalous_trace_ids

def load_scenario_data(scenario_path: str) -> Tuple[Optional[ExperimentData], Optional[ScenarioMetadata], Set[str]]:
    """Load all data for one fault scenario.

    Args:
        scenario_path: Path to the scenario folder.

    Returns:
        Tuple of (ExperimentData, ScenarioMetadata, set of anomalous trace
        IDs). If the experiment data cannot be loaded, the first two elements
        are None and the set is empty.
    """
    scenario_path = Path(scenario_path)
    folder_name = scenario_path.name

    # 1. Load the cached ExperimentData; without it the scenario is unusable.
    cache_path = scenario_path / "cache" / "telemetry_loader.pkl"
    exp_data = load_from_cache(str(cache_path))
    if exp_data is None:
        logger.error(f"加载实验数据失败: {cache_path}")
        return None, None, set()

    # 2. Load the anomalous trace IDs (empty set is tolerated, just warned).
    results_path = scenario_path / "detection_results" / "trace_anomalies" / "trace_results" / "results.json"
    anomalous_trace_ids = load_anomalous_trace_ids(str(results_path))
    if not anomalous_trace_ids:
        logger.warning(f"没有找到异常Trace ID: {results_path}")

    # 3. Prefer fault info from details.json, falling back to the values
    #    carried by the ExperimentData.
    details = load_details_json(str(scenario_path))
    from_details = bool(details and "anomaly_details" in details)
    if from_details:
        anomaly_details = details["anomaly_details"]
        anomaly_component = anomaly_details.get("component", exp_data.anomaly_component)
        anomaly_reason = anomaly_details.get("reason", exp_data.anomaly_reason)
    else:
        anomaly_component = exp_data.anomaly_component
        anomaly_reason = exp_data.anomaly_reason

    # 4. Build the scenario metadata once (previously two duplicated
    #    construction sites differing only in the two anomaly fields).
    metadata = ScenarioMetadata(
        scenario_id=generate_scenario_id(),
        folder_name=folder_name,
        anomaly_component=anomaly_component,
        anomaly_reason=anomaly_reason,
        dataset_type="",  # filled in later by the caller
        trace_count=len(anomalous_trace_ids),
        level_folder=""   # filled in later by the caller
    )
    if from_details:
        logger.info(f"从details.json加载场景数据成功: {folder_name}, 异常组件: {metadata.anomaly_component}, 异常类型: {metadata.anomaly_reason}")
    else:
        logger.info(f"加载场景数据成功: {folder_name}, 异常组件: {exp_data.anomaly_component}, 异常类型: {exp_data.anomaly_reason}")

    return exp_data, metadata, anomalous_trace_ids
