import os
import traceback
import numpy as np
import pandas as pd
from scipy import stats
from typing import List, Tuple, Optional, Dict
import multiprocessing as mp
from multiprocessing import Pool
from config.ssq_config import SSQ_CONFIG, init_global_logger, logger  # 导入配置和全局日志

class MultiWindowFractalEngine:
    """Multi-window fractal dimension engine.

    Computes box-counting fractal features of red-ball draws over several
    rolling window sizes, in parallel worker processes (fork start method,
    Ubuntu-targeted), supporting both train and predict run modes.
    """

    def __init__(self, run_type: str = None):
        """Initialize all parameters from the config file.

        Args:
            run_type: run mode, either ``"train"`` or ``"predict"``.

        Raises:
            ValueError: if ``run_type`` is neither ``"train"`` nor ``"predict"``.
        """
        # Validate the run mode up front.
        if run_type not in ["train", "predict"]:
            raise ValueError(f"数据类型错误！仅支持 'train' 或 'predict', 当前输入: {run_type}")
        self.run_type = run_type

        # Base fractal configuration.
        self.fractal_config = SSQ_CONFIG["fractal"]
        self.model_file_prefix = self.fractal_config["model_file"]             # per-window file-name prefix
        self.window_sizes = SSQ_CONFIG["feature_window_size"]                  # e.g. [13, 34, 55]
        self.x_box_sizes = self.fractal_config["red_fractal_x_box_sizes"]      # e.g. [3, 5, 7]
        self.y_box_sizes = self.fractal_config["red_fractal_y_box_sizes"]      # e.g. [1, 2, 3]
        self.z_box_sizes = self.fractal_config["red_fractal_z_box_sizes"]      # e.g. [3, 5, 7]

        # Resolve input/output paths for the selected run mode from config
        # (single lookup instead of computing train paths and overwriting them).
        file_section = "predict_file" if self.run_type == "predict" else "train_file"
        self.data_folder = SSQ_CONFIG[file_section]["DATA_FOLDER"]
        self.raw_data_path = os.path.join(self.data_folder, SSQ_CONFIG[file_section]["csv01_file"])
        self.full_fractal_feature_file = os.path.join(self.data_folder, SSQ_CONFIG[file_section]["full_fractal_feature_file"])

        # Red-ball related configuration.
        self.max_red = SSQ_CONFIG["max_red_ball"]
        self.red_cols = [f"r{i}" for i in range(1, self.max_red + 1)]

        # Mutable state.
        self.process_results: List[str] = []  # output CSV path produced by each worker process
        init_global_logger(log_file=f"{self.run_type}_fractal_mutil_calc.log", pid=os.getpid())  # dedicated log for fractal feature calc
        self.final_fractal_feat_df: Optional[pd.DataFrame] = None  # final merged feature matrix

    def _get_window_save_path(self, window_size: int) -> str:
        """Return the per-window feature CSV path (config-driven naming)."""
        file_name = f"{self.model_file_prefix}{window_size}.csv"
        return os.path.join(os.path.dirname(self.raw_data_path), file_name)

    def _validate_raw_data(self, raw_df: pd.DataFrame, window_size: int) -> bool:
        """Validate the raw draw data for one window size.

        Checks: non-empty, enough rows for the window, and that all required
        columns (idx, date, red-ball columns) are present.
        """
        if raw_df.empty:
            logger.error(f"窗口{window_size}期: 原始数据为空")
            return False
        if len(raw_df) < window_size:
            logger.error(f"窗口{window_size}期: 数据量{len(raw_df)}期不足窗口大小{window_size}期")
            return False
        required_cols = ["idx", "date"] + self.red_cols
        missing_cols = [col for col in required_cols if col not in raw_df.columns]
        if missing_cols:
            logger.error(f"窗口{window_size}期: 原始数据缺失必要列{missing_cols}")
            return False
        return True

    def _calculate_local_density(self, window_data: pd.DataFrame, x_box_size: int) -> np.ndarray:
        """Compute the red-ball local density matrix (internal helper).

        For each period, the red-ball columns are grouped into bands of
        ``x_box_size`` and every cell in a band gets that band's row sum.
        """
        n_periods, n_reds = window_data.shape
        density_matrix = np.zeros_like(window_data, dtype=int)
        for x_start in range(0, n_reds, x_box_size):
            x_end = min(x_start + x_box_size, n_reds)
            # Row sum within the band, broadcast back over the band's columns.
            period_density = window_data.iloc[:, x_start:x_end].sum(axis=1).values
            density_matrix[:, x_start:x_end] = period_density.reshape(-1, 1)
        return density_matrix

    def _build_3d_point_cloud(self, window_data: pd.DataFrame, x_box_size: int) -> List[Tuple[int, int, int]]:
        """Build the 3-D point set (internal helper).

        Each drawn ball becomes a point (ball number, local density, relative
        period index within the window).
        """
        density_matrix = self._calculate_local_density(window_data, x_box_size)
        point_cloud = []
        for z_rel, (_, row) in enumerate(window_data.iterrows()):
            for x_idx, (red_col, val) in enumerate(zip(self.red_cols, row)):
                if val == 1:
                    # Absolute ball number parsed from the column name ("r7" -> 7).
                    x_abs = int(red_col.replace("r", ""))
                    y_val = density_matrix[z_rel, x_idx]
                    point_cloud.append((x_abs, y_val, z_rel))
        return point_cloud

    def _count_boxes(self, point_cloud: List[Tuple[int, int, int]], x_size: int, y_size: int, z_size: int) -> int:
        """Count occupied boxes at the given box sizes (internal helper)."""
        if not point_cloud:
            return 0
        # A box is identified by its integer grid coordinates.
        valid_boxes = {(x // x_size, y // y_size, z // z_size) for (x, y, z) in point_cloud}
        return len(valid_boxes)

    def _fit_fractal_dimension(self, box_counts: List[int], scales: List[float]) -> Tuple[Optional[float], Optional[float]]:
        """Fit the fractal dimension and R² via log-log linear regression.

        Args:
            box_counts: occupied-box counts per scale (zero counts are skipped).
            scales: the corresponding scale values.

        Returns:
            (slope, r_squared), or (None, None) when fewer than two valid
            (scale, count) pairs are available.
        """
        valid_pairs = [(np.log10(s), np.log10(n)) for s, n in zip(scales, box_counts) if n > 0]
        if len(valid_pairs) < 2:
            logger.warning("有效尺度组合不足2组, 拟合失败")
            return None, None
        x_data = np.array([p[0] for p in valid_pairs])
        y_data = np.array([p[1] for p in valid_pairs])
        # BUG FIX: linregress returns (slope, intercept, rvalue, pvalue, stderr);
        # the third value is the correlation coefficient r, NOT R². Square it.
        fit = stats.linregress(x_data, y_data)
        r_squared = fit.rvalue ** 2
        if r_squared < 0.6:
            logger.warning(f"拟合优度R2={r_squared:.3f} < 0.6, 结果可靠性低")
        return fit.slope, r_squared

    def _compute_window_features(self, window_df: pd.DataFrame, window_size: int, current_issue) -> Optional[Dict]:
        """Multi-scale box counting + fit for one window (internal helper).

        Shared by the train and predict paths of ``calculate_single_window``.

        Returns:
            One feature-row dict keyed by issue number, or None when the
            log-log fit failed.
        """
        all_box_counts: List[int] = []
        all_scales: List[float] = []
        for x_size in self.x_box_sizes:
            point_cloud = self._build_3d_point_cloud(window_df, x_size)
            if not point_cloud:
                continue
            for y_size in self.y_box_sizes:
                for z_size in self.z_box_sizes:
                    box_count = self._count_boxes(point_cloud, x_size, y_size, z_size)
                    # Scale = inverse cube root of the box volume.
                    scale = 1 / np.cbrt(x_size * y_size * z_size)
                    all_box_counts.append(box_count)
                    all_scales.append(scale)

        fractal_dim, r_squared = self._fit_fractal_dimension(all_box_counts, all_scales)
        if fractal_dim is None or r_squared is None:
            return None
        return {
            "idx": current_issue,
            f"fractal_dim_{window_size}": round(fractal_dim, 6),
            f"fractal_r2_{window_size}": round(r_squared, 6),
            f"avg_box_count_{window_size}": round(np.mean(all_box_counts), 3),
            f"scale_min_{window_size}": round(np.min(all_scales), 3),
            f"scale_max_{window_size}": round(np.max(all_scales), 3),
            f"scale_span_{window_size}": round(np.max(all_scales) - np.min(all_scales), 3)
        }

    def calculate_single_window(self, window_size: int) -> str:
        """Compute fractal features for one window size.

        Train mode: rolling windows over the whole history (many rows).
        Predict mode: only the last complete window (one row for the next issue).

        Returns:
            Path of the CSV file the features were written to.

        Raises:
            RuntimeError: if raw-data validation fails or no row could be fitted.
        """
        try:
            logger.info(f'3. 单窗口分形特征计算 - 模式：{self.run_type}，窗口大小：{window_size}')
            # Load and validate the raw draw data.
            raw_df = pd.read_csv(
                self.raw_data_path,
                usecols=["idx", "date"] + self.red_cols,
                encoding="utf-8"
            ).sort_values("idx").reset_index(drop=True)

            if not self._validate_raw_data(raw_df, window_size):
                raise RuntimeError(f"窗口{window_size}期数据验证失败")

            total_periods = len(raw_df)
            results = []
            save_path = self._get_window_save_path(window_size)

            if self.run_type == "predict":
                # Predict mode: only the last complete window (features of the
                # issue to be predicted).
                start_idx = total_periods - window_size
                window_df = raw_df.iloc[start_idx:start_idx + window_size][self.red_cols]
                # Issue to predict = last historical issue + 1 (raw data is
                # assumed to end at the previous draw).
                current_issue = raw_df.iloc[-1]["idx"] + 1
                row = self._compute_window_features(window_df, window_size, current_issue)
                if row is not None:
                    results.append(row)
                logger.info(f"预测模式 - 窗口{window_size}期计算完成，仅保留待预测期1行数据（期号：{current_issue}）")
            else:
                # Train mode: roll the window over the full history, producing
                # one feature row per issue.
                n_windows = total_periods - window_size + 1
                for start_idx in range(n_windows):
                    end_idx = start_idx + window_size
                    window_df = raw_df.iloc[start_idx:end_idx][self.red_cols]
                    current_issue = raw_df.iloc[end_idx - 1]["idx"]
                    row = self._compute_window_features(window_df, window_size, current_issue)
                    if row is not None:
                        results.append(row)
                    # Progress log every 200 windows.
                    if start_idx % 200 == 0:
                        progress = (start_idx + 1) / n_windows * 100
                        logger.info(f"训练模式 - 窗口{window_size}期: 计算进度{progress:.1f}%, 当前期号{current_issue}")

            # Persist results (train: many rows; predict: one row). Guard the
            # empty case explicitly: sort_values("idx") on an empty frame would
            # otherwise raise a confusing KeyError.
            if not results:
                raise RuntimeError(f"窗口{window_size}期无有效拟合结果, 无法保存")
            result_df = pd.DataFrame(results).sort_values("idx")
            result_df.to_csv(save_path, index=False, encoding="utf-8")
            logger.info(f"窗口{window_size}期结果保存至{save_path}，数据量：{len(result_df)}行")
            return save_path

        except Exception as e:
            logger.error(f"窗口{window_size}期计算失败: {e}\n{traceback.format_exc()}")
            raise

    @staticmethod
    def _process_worker(window_size: int, engine_instance: "MultiWindowFractalEngine") -> str:
        """Multiprocess worker wrapper (avoids bound-method pickling issues)."""
        logger.info(f'2. 多进程工作器启动 - 处理窗口：{window_size}')
        return engine_instance.calculate_single_window(window_size)

    def run_multi_process(self) -> bool:
        """Run all window computations in parallel processes (Ubuntu only).

        Returns:
            True when every worker produced its output file, False otherwise.
        """
        try:
            logger.info(f'1. 启动多进程并行计算 - 模式：{self.run_type}，窗口列表：{self.window_sizes}')
            # The raw data file must exist before forking workers.
            if not os.path.exists(self.raw_data_path):
                logger.error(f"原始数据文件不存在: {self.raw_data_path}")
                return False

            # Ubuntu-specific: force the fork start method so the workers
            # inherit the fully initialized engine instance.
            mp.set_start_method("fork", force=True)
            logger.info("Ubuntu系统，使用'fork'方式启动多进程")

            # One process per window size, all windows computed in parallel.
            with Pool(processes=len(self.window_sizes)) as pool:
                logger.info(f"启动{len(self.window_sizes)}个进程，并行计算所有窗口")
                self.process_results = pool.starmap(
                    self._process_worker,
                    [(ws, self) for ws in self.window_sizes]
                )

            # Verify every worker wrote its output file.
            for path in self.process_results:
                if not os.path.exists(path):
                    raise RuntimeError(f"进程输出文件缺失: {path}")
            logger.info("所有窗口分形计算进程执行完成")
            return True

        except Exception as e:
            logger.critical(f"多进程执行失败: {e}\n{traceback.format_exc()}")
            return False

    def merge_window_features(self) -> bool:
        """Merge the per-window fractal features into one feature matrix.

        Train mode yields many rows (one per issue covered by all windows);
        predict mode yields exactly one row (the issue to be predicted).

        Returns:
            True on success, False on failure (errors are logged).
        """
        try:
            # Load every per-window feature file.
            window_dfs = []
            for window_size in self.window_sizes:
                file_path = self._get_window_save_path(window_size)
                if not os.path.exists(file_path):
                    logger.error(f"窗口{window_size}期特征文件缺失: {file_path}")
                    return False

                window_df = pd.read_csv(file_path, encoding="utf-8")
                window_dfs.append(window_df)
                logger.info(f"加载窗口{window_size}期特征，数据量：{len(window_df)}行")

            # Inner-join all windows on the issue number (shared by both modes).
            merged_df = window_dfs[0]
            for df in window_dfs[1:]:
                merged_df = merged_df.merge(df, on="idx", how="inner")

            if self.run_type == "predict":
                # Predict mode: force exactly one row (defensive against any
                # merge anomaly).
                merged_df = merged_df.tail(1).reset_index(drop=True)
                # The predicted issue has not been drawn yet, so no date.
                merged_df["date"] = None
            else:
                # Train mode: keep only issues covered by every window. The
                # largest window starts latest, hence the max of per-window
                # minimum issue numbers.
                min_valid_issue = max(df["idx"].min() for df in window_dfs)
                merged_df = merged_df[merged_df["idx"] >= min_valid_issue].sort_values("idx")

                # Align with the raw data to attach the draw date.
                raw_df = pd.read_csv(
                    self.raw_data_path,
                    usecols=["idx", "date"],
                    encoding="utf-8"
                ).sort_values("idx")
                merged_df = merged_df.merge(raw_df, on="idx", how="left")

            self.final_fractal_feat_df = merged_df

            # Persist the merged matrix.
            self.final_fractal_feat_df.to_csv(self.full_fractal_feature_file, index=False, encoding="utf-8")
            logger.info(f"多窗口特征合并完成，保存至{self.full_fractal_feature_file}")
            logger.info(f"合并后特征维度: {len(self.final_fractal_feat_df.columns)}列, 数据量: {len(self.final_fractal_feat_df)}行")
            return True

        except Exception as e:
            logger.error(f"特征合并失败: {e}\n{traceback.format_exc()}")
            return False

    def do_fractal_features(self) -> bool:
        """Run the full pipeline: parallel computation, then feature merge.

        Returns:
            True when both stages succeed, False otherwise.
        """
        logger.info("="*50)
        logger.info(f"启动多窗口分形维数计算完整流程 - 模式：{self.run_type}")
        logger.info("="*50)

        # Stage 1: compute each window's features in parallel.
        if not self.run_multi_process():
            logger.error("多进程计算阶段失败, 终止流程.")
            return False

        # Stage 2: merge all window features.
        if not self.merge_window_features():
            logger.error("特征合并阶段失败, 终止流程.")
            return False

        logger.info("="*50)
        logger.info(f"多窗口分形维数计算完整流程执行成功 - 模式：{self.run_type}")
        logger.info("="*50)
        return True


# ---------------------- Script entry point ----------------------
if __name__ == "__main__":
    # Switch run_type to "train" to rebuild features over the full history;
    # "predict" computes only the latest window for the upcoming issue.
    engine = MultiWindowFractalEngine(run_type="predict")
    engine.do_fractal_features()