import os
import re
import logging
import numpy as np
import pandas as pd
import matplotlib as mpl
# 设置Agg后端，确保在无GUI环境下也能正常生成图像
mpl.use('Agg')
import matplotlib.pyplot as plt
from concurrent.futures import ThreadPoolExecutor
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
import joblib

from config import *
from utils import (
    setup_logging, calculate_optimal_window_size, calculate_optimal_threshold,
    process_file, find_steady_state, plot_speed_data, evaluate_model,
    analyze_model_complexity, plot_combined_calibration_data, train_model,
    setup_chinese_font
)

def _load_folder_data(folder_path, commands):
    """Read all data files of a folder concurrently.

    Args:
        folder_path: directory containing the raw data files.
        commands: mapping of filename -> command value.

    Returns:
        dict mapping command -> non-empty DataFrame (as produced by
        ``process_file``). Files that fail to load are logged and skipped
        instead of aborting the whole folder.
    """
    data = {}
    with ThreadPoolExecutor() as executor:
        # Keep the filename alongside each future for useful error messages.
        futures = {
            executor.submit(process_file, os.path.join(folder_path, filename), command): filename
            for filename, command in commands.items()
        }
        for future, filename in futures.items():
            try:
                df = future.result()
            except Exception as e:
                # One broken file must not abort the entire folder
                # (previously the exception propagated out of the loop).
                logging.error(f"处理文件 {filename} 时出错: {e}")
                continue
            if not df.empty:
                data[df['Command'].iloc[0]] = df
    return data

def _extract_steady_states(data, folder_plots_dir):
    """Find the steady-state speed for each command and plot it.

    Args:
        data: dict mapping command -> DataFrame with 'Speed' samples.
        folder_plots_dir: output directory for per-command plots.

    Returns:
        list of ``[command, steady_state_speed]`` rows (possibly empty).
    """
    calibration_data = []
    for command, df in sorted(data.items()):
        if len(df) < PROCESSING_CONFIG['min_data_points']:
            logging.warning(f"指令 {command} 的数据点不足")
            continue

        logging.info(f"处理指令 {command} 的数据，共 {len(df)} 个数据点")

        try:
            # Derive window size / threshold from the data itself.
            window_size = calculate_optimal_window_size(df, PROCESSING_CONFIG['window_size_ratio'])
            diff_threshold = calculate_optimal_threshold(df, PROCESSING_CONFIG['diff_threshold_ratio'])
            logging.info(f"计算参数: window_size={window_size}, diff_threshold={diff_threshold:.6f}")

            steady_state_df = find_steady_state(df, window_size, diff_threshold)
            if steady_state_df.empty:
                logging.warning(f"指令 {command} 没有找到稳态数据，跳过")
                continue

            # Steady-state speed = mean speed over the detected stable window.
            steady_state_speed = steady_state_df['Speed'].mean()
            logging.info(f"指令 {command} 的稳态速度: {steady_state_speed:.6f} m/s")
            calibration_data.append([command, steady_state_speed])

            # Plotting failures are non-fatal: the numeric result is kept.
            try:
                plot_speed_data(df, steady_state_df, steady_state_speed, command, folder_plots_dir)
                logging.info(f"已保存指令 {command} 的速度曲线图")
            except Exception as e:
                logging.error(f"绘制指令 {command} 的图形时出错: {e}")

        except Exception as e:
            logging.error(f"处理指令 {command} 时出错: {e}")
            continue
    return calibration_data

def process_data_folder(folder_config, plots_dir, log_dir):
    """Process a single data folder end-to-end.

    Loads all configured files, extracts the steady-state speed per command,
    saves the calibration CSV and, when at least 3 points exist, fits and
    persists a polynomial model.

    Args:
        folder_config: dict with keys 'path', 'name' and 'commands'
            (filename -> command mapping).
        plots_dir: root directory for generated plots.
        log_dir: directory for result CSVs and model files.

    Returns:
        None when no usable data was found; otherwise a dict with keys
        'name' and 'data' (calibration DataFrame), plus 'model' and 'poly'
        when model fitting succeeded.
    """
    folder_path = folder_config['path']
    folder_name = folder_config['name']
    commands = folder_config['commands']
    # Compute the filesystem-safe name once (it was previously recomputed
    # four times with inconsistent quoting).
    safe_name = folder_name.replace(' ', '_')

    logging.info(f"开始处理数据文件夹: {folder_name} ({folder_path})")

    # Folder-specific output directory for plots.
    folder_plots_dir = os.path.join(plots_dir, safe_name)
    os.makedirs(folder_plots_dir, exist_ok=True)

    data = _load_folder_data(folder_path, commands)
    if not data:
        logging.error(f"文件夹 {folder_name} 中没有找到有效数据")
        return None

    calibration_data = _extract_steady_states(data, folder_plots_dir)
    if not calibration_data:
        logging.error(f"文件夹 {folder_name} 中没有找到任何有效的稳态数据")
        return None

    calibration_df = pd.DataFrame(calibration_data, columns=['Command', 'SteadyStateSpeed'])
    logging.info(f"文件夹 {folder_name} 的标定数据:\n{calibration_df}")

    # Persist the calibration table.
    results_file = os.path.join(log_dir, f"{safe_name}_{OUTPUT_CONFIG['results_file']}")
    calibration_df.to_csv(results_file, index=False)
    logging.info(f"标定结果已保存到 {results_file}")

    # Build the result incrementally: the data-only dict is the fallback for
    # both "too few points" and "training failed" (previously three
    # duplicated return-dict literals).
    result = {'name': folder_name, 'data': calibration_df}

    if len(calibration_df) < 3:  # a quadratic fit needs at least 3 points
        logging.warning(f"文件夹 {folder_name} 的数据点不足，无法拟合模型")
        return result

    try:
        model, poly = train_model(calibration_df['Command'].values,
                                  calibration_df['SteadyStateSpeed'].values)

        # Persist model + polynomial feature transformer side by side.
        model_file = os.path.join(log_dir, f"{safe_name}_{OUTPUT_CONFIG['model_file']}")
        poly_file = os.path.join(log_dir, f"{safe_name}_{OUTPUT_CONFIG['poly_file']}")
        joblib.dump(model, model_file)
        joblib.dump(poly, poly_file)
        logging.info(f"文件夹 {folder_name} 的模型已保存到 {model_file}")

        result['model'] = model
        result['poly'] = poly
    except Exception as e:
        # Training failure is non-fatal: callers still get the raw data.
        logging.error(f"文件夹 {folder_name} 的模型训练出错: {e}")

    return result

def main():
    """Entry point: process every configured data folder, optionally fit a
    combined model over all folders, and draw the combined calibration plot.
    """
    # Log next to this script file.
    log_dir = os.path.dirname(os.path.abspath(__file__))
    log_file = os.path.join(log_dir, OUTPUT_CONFIG['log_file'])
    setup_logging(log_file)
    logging.info("开始数据分析")

    # Output directory for all plots.
    plots_dir = os.path.join(log_dir, VISUALIZATION_CONFIG['save_path'])
    os.makedirs(plots_dir, exist_ok=True)

    all_datasets = {}          # folder_name -> dataset dict (separate mode)
    all_calibration_data = []  # [command, speed, folder_name] rows

    for folder_config in DATA_CONFIG['data_folders']:
        # Every folder is processed the same way; 'separate' only controls
        # whether the per-folder dataset is kept for individual analysis.
        # (The original duplicated this entire body across an if/else.)
        dataset = process_data_folder(folder_config, plots_dir, log_dir)
        if not dataset:
            continue
        folder_name = dataset['name']
        if DATA_CONFIG['separate']:
            all_datasets[folder_name] = dataset
        # Collect calibration rows for the combined analysis.
        if 'data' in dataset:
            for _, row in dataset['data'].iterrows():
                all_calibration_data.append([
                    row['Command'],
                    row['SteadyStateSpeed'],
                    folder_name
                ])

    # Combined analysis across all folders.
    if DATA_CONFIG['combined'] and all_calibration_data:
        combined_df = pd.DataFrame(all_calibration_data,
                                 columns=['Command', 'SteadyStateSpeed', 'Dataset'])

        # Save the merged calibration table.
        combined_results_file = os.path.join(log_dir, OUTPUT_CONFIG['combined_results_file'])
        combined_df.to_csv(combined_results_file, index=False)
        logging.info(f"合并标定结果已保存到 {combined_results_file}")

        if len(combined_df) >= 3:  # quadratic fit needs at least 3 points
            X = combined_df['Command'].values
            y = combined_df['SteadyStateSpeed'].values

            try:
                model, poly = train_model(X, y)

                # Persist the combined model + polynomial transformer.
                model_file = os.path.join(log_dir, f"combined_{OUTPUT_CONFIG['model_file']}")
                poly_file = os.path.join(log_dir, f"combined_{OUTPUT_CONFIG['poly_file']}")
                joblib.dump(model, model_file)
                joblib.dump(poly, poly_file)
                logging.info(f"合并模型已保存到 {model_file}")

                all_datasets['combined'] = {
                    'name': '合并数据',
                    'data': combined_df,
                    'model': model,
                    'poly': poly
                }

            except Exception as e:
                logging.error(f"合并模型训练出错: {e}")
        else:
            logging.warning("合并数据点不足，无法拟合模型")

    # Combined chart across all collected datasets.
    if VISUALIZATION_CONFIG['combined_plot'] and len(all_datasets) > 0:
        try:
            plot_combined_calibration_data(all_datasets, plots_dir)
        except Exception as e:
            logging.error(f"绘制合并图表时出错: {e}")

    logging.info("数据分析完成")

if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        # Top-level safety net: record any unhandled error with its full
        # traceback (exc_info=True) instead of exiting with a bare trace.
        logging.error(f"程序执行出错: {e}", exc_info=True)
    finally:
        # Always release matplotlib figure resources; with the Agg backend
        # figures stay in memory until explicitly closed.
        plt.close('all')