#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
@Author: kindey
@Date: 2025/9/9
@Description: 
"""

import logging
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

from scipy.fft import fft, fftfreq
from scipy.signal import find_peaks
from tsfresh import extract_features
from tsfresh.feature_extraction import MinimalFCParameters, EfficientFCParameters
from tsfresh.feature_extraction.settings import ComprehensiveFCParameters
from statsmodels.tsa.seasonal import seasonal_decompose
import pywt

from apps.services.dev_data_service import DevDataService as DevDS

class YaliTimeService:
    """Periodicity analysis for pressure ('ja_li') time-series device data.

    Loads readings through ``DevDataService`` and analyzes them with four
    complementary techniques: tsfresh feature extraction, FFT spectral
    analysis, autocorrelation, seasonal decomposition and wavelet
    multi-resolution analysis.
    """

    logger = logging.getLogger(__name__)

    # Source data cadence: one reading every 20 minutes (in seconds).
    # NOTE(review): assumed from the original hard-coded value — confirm
    # against the actual device configuration.
    SAMPLING_INTERVAL_SECONDS = 20 * 60

    def __init__(self):
        self.logger.info("YaliTimeService init")

    @staticmethod
    def __get_df():
        """Fetch device readings and return them as a DataFrame.

        Returns:
            pd.DataFrame: columns ``data_time`` (datetime64) and ``ja_li``
            (pressure value), ordered as delivered by the data service.
        """
        data_type = "rec"
        d = DevDS()
        columns = ['data_time', 'data_value']
        start_time = '2024-11-06 00:00:00'
        end_time = '2024-12-30 23:59:59'
        dev_id = 70
        dev_data_list = d.get_dev_data_by_condition(data_type, dev_id, start_time, end_time)
        df = pd.DataFrame(dev_data_list, columns=columns)
        df.rename(columns={'data_value': 'ja_li'}, inplace=True)
        df['data_time'] = pd.to_datetime(df['data_time'])
        return df

    @staticmethod
    def get_feature():
        """Extract periodicity-oriented tsfresh features from the series.

        Returns:
            pd.DataFrame: one row of extracted features (tsfresh output),
            previously discarded — now returned so callers can use it.
        """
        df = YaliTimeService.__get_df()

        # tsfresh requires an id column; the whole series is one entity.
        df['id'] = 1

        # Feature-extraction settings focused on periodicity-related
        # calculators (autocorrelation, peaks, FFT coefficients).
        extraction_settings = {
            "mean": None,
            "median": None,
            "standard_deviation": None,
            "variance": None,
            "autocorrelation": [{"lag": lag} for lag in range(1, 21)],
            "partial_autocorrelation": [{"lag": lag} for lag in range(1, 11)],
            "agg_autocorrelation": [{"f_agg": "mean", "maxlag": 40},
                                    {"f_agg": "median", "maxlag": 40}],
            "number_cwt_peaks": [{"n": n} for n in [1, 5]],
            "number_peaks": [{"n": n} for n in [1, 3, 5, 10, 50]],
            "fft_coefficient": [{"coeff": coeff, "attr": attr}
                                for coeff in range(0, 10)
                                for attr in ["real", "imag", "abs", "angle"]]
        }

        extracted_features = extract_features(
            df,
            column_id='id',
            column_sort='data_time',
            default_fc_parameters=extraction_settings
        )

        # Debug output — lazy %-style args so formatting is skipped when
        # DEBUG is disabled (identical message text to before).
        log = YaliTimeService.logger
        log.debug("特征数量:%s", extracted_features.shape)
        log.debug("列名示例:%s", extracted_features.columns[:5].tolist())
        log.debug("数据示例:%s", extracted_features.iloc[0, :5])
        log.debug("所有特征名称:")
        for i, col in enumerate(extracted_features.columns):
            log.debug("%d. %s", i + 1, col)

        log.debug("特征数据形状: %s", extracted_features.shape)
        log.debug("特征统计信息:\n%s", extracted_features.describe())

        log.debug("所有特征名称和值:")
        for i, col in enumerate(extracted_features.columns):
            log.debug("%d. %s: %s", i + 1, col, extracted_features[col].iloc[0])

        return extracted_features

    @staticmethod
    def __extract_periodicity_fft(df, sampling_interval=20 * 60):
        """Extract dominant periods via the fast Fourier transform.

        Args:
            df: DataFrame with a ``ja_li`` column.
            sampling_interval: sample spacing in seconds (default: 20 min).

        Returns:
            tuple: (peak indices into the positive-frequency spectrum,
            dominant frequencies in Hz, dominant periods in **seconds**,
            magnitudes at the peaks).
        """
        values = df['ja_li'].values

        fft_values = fft(values)
        frequencies = fftfreq(len(values), d=sampling_interval)

        magnitude = np.abs(fft_values)

        # Spectrum is symmetric for real input; keep positive freqs only.
        positive_freq_mask = frequencies > 0
        positive_magnitude = magnitude[positive_freq_mask]
        positive_frequencies = frequencies[positive_freq_mask]

        # Peaks above 10% of the strongest component count as dominant.
        peaks, _ = find_peaks(positive_magnitude, height=np.max(positive_magnitude) * 0.1)

        dominant_frequencies = positive_frequencies[peaks]
        dominant_periods = 1 / dominant_frequencies  # seconds

        return peaks, dominant_frequencies, dominant_periods, positive_magnitude[peaks]

    @staticmethod
    def __extract_periodicity_autocorr(df):
        """Detect periodicity via the (normalized) autocorrelation function.

        Returns:
            tuple: (lag indices of significant autocorrelation peaks,
            autocorrelation values at those lags).
        """
        values = df['ja_li'].values

        # Full cross-correlation of the signal with itself; keep the
        # non-negative-lag half.
        autocorr = np.correlate(values, values, mode='full')
        autocorr = autocorr[autocorr.size // 2:]

        # Normalize so that lag 0 equals 1.
        autocorr = autocorr / autocorr[0]

        # Peaks in the ACF indicate candidate period lengths.
        peaks, _ = find_peaks(autocorr, height=0.1, distance=10)

        return peaks, autocorr[peaks]

    @staticmethod
    def __seasonal_decomposition_analysis(df):
        """Analyze periodicity via classical seasonal decomposition.

        Returns:
            statsmodels DecomposeResult (trend/seasonal/resid components).
        """
        df_ts = df.set_index('data_time')
        # Resample to hourly grid ('h' is the non-deprecated alias).
        # NOTE(review): the raw data is 20-min sampled, so asfreq keeps
        # only on-the-hour stamps — confirm this downsampling is intended.
        df_ts = df_ts.asfreq('h')

        # Forward-fill gaps (fillna(method=...) is deprecated in pandas 2.x).
        df_ts = df_ts.ffill()

        # Assumed 24-hour period at hourly frequency — TODO confirm.
        decomposition = seasonal_decompose(df_ts['ja_li'], model='additive', period=24)

        return decomposition

    @staticmethod
    def __wavelet_analysis(df):
        """Detect multi-scale periodicity via wavelet decomposition.

        Returns:
            tuple: (wavelet coefficients per level, energy per level).
        """
        values = df['ja_li'].values

        # Daubechies-4 is a common general-purpose choice.
        wavelet = pywt.Wavelet('db4')

        coeffs = pywt.wavedec(values, wavelet, level=5)

        # Energy = sum of squared coefficient magnitudes at each scale.
        energies = [np.sum(np.abs(coeff) ** 2) for coeff in coeffs]

        return coeffs, energies

    @staticmethod
    def __extract_periodicity_fft_with_time(df, dominant_period_points):
        """Map the dominant FFT period onto the time axis.

        Args:
            df: DataFrame with a ``data_time`` column.
            dominant_period_points: period lengths expressed in **sample
                counts** (not seconds); only the first entry is used.

        Returns:
            tuple: (list of cycle start timestamps, list of cycle end
            timestamps); both empty when no usable period is given.
        """
        time_index = df['data_time'].values

        if len(dominant_period_points) == 0:
            return [], []
        cycle_length = int(dominant_period_points[0])
        if cycle_length <= 0:
            return [], []

        cycle_starts = []
        cycle_ends = []
        for i in range(0, len(time_index), cycle_length):
            cycle_starts.append(time_index[i])
            end_idx = i + cycle_length - 1
            # The trailing partial cycle gets no end timestamp.
            if end_idx < len(time_index):
                cycle_ends.append(time_index[end_idx])

        return cycle_starts, cycle_ends

    @staticmethod
    def __get_cycle_times_from_autocorr(df, autocorr_peaks):
        """Derive cycle start/end times from autocorrelation lags.

        Args:
            df: DataFrame with a ``data_time`` column.
            autocorr_peaks: candidate period lengths in sample counts.

        Returns:
            list: one list of cycle dicts ({'start','end','duration_points'})
            per candidate peak (empty list for unusable lags).
        """
        time_index = df['data_time'].values
        data_points = len(time_index)

        cycle_info = []
        for peak in autocorr_peaks:
            cycle_length = int(peak)
            if cycle_length <= 0:
                # Lag 0 cannot define a cycle (and would make range() step 0).
                cycle_info.append([])
                continue

            cycles = []
            for i in range(0, data_points, cycle_length):
                cycle_end_index = min(i + cycle_length - 1, data_points - 1)
                cycles.append({
                    'start': time_index[i],
                    'end': time_index[cycle_end_index],
                    'duration_points': cycle_length
                })
            cycle_info.append(cycles)

        return cycle_info

    @staticmethod
    def __detect_cycle_boundaries(df, period_length):
        """Detect cycle boundaries via smoothing and local extrema.

        Args:
            df: DataFrame with ``ja_li`` and ``data_time`` columns.
            period_length: expected cycle length in sample counts.

        Returns:
            list of dicts with 'start', 'end' and 'duration' per cycle
            candidate (boundaries are consecutive local extrema).
        """
        values = df['ja_li'].values
        time_index = df['data_time'].values

        # Smooth with a centered moving average (~quarter period window);
        # edges of the smoothed array are NaN and never become peaks.
        window_size = max(1, period_length // 4)
        smoothed = pd.Series(values).rolling(window=window_size, center=True).mean().values

        # find_peaks requires distance >= 1; clamp for tiny periods.
        min_distance = max(1, period_length // 2)
        peaks, _ = find_peaks(smoothed, distance=min_distance)
        troughs, _ = find_peaks(-smoothed, distance=min_distance)

        # Merge maxima and minima into one ordered boundary set; these are
        # valid indices into time_index by construction.
        boundaries = np.sort(np.concatenate([peaks, troughs]))
        boundary_times = time_index[boundaries]

        # Pair consecutive boundaries into candidate cycles.
        cycle_periods = []
        for i in range(len(boundary_times) - 1):
            cycle_periods.append({
                'start': boundary_times[i],
                'end': boundary_times[i + 1],
                'duration': boundary_times[i + 1] - boundary_times[i]
            })

        return cycle_periods

    @staticmethod
    def comprehensive_periodicity_analysis():
        """Run all periodicity analyses and bundle the results.

        Returns:
            dict with the raw DataFrame, FFT / autocorrelation / seasonal
            decomposition / wavelet results, and derived cycle timings.
        """
        df = YaliTimeService.__get_df()

        # 1. FFT analysis (periods come back in seconds).
        fft_peaks, fft_freqs, fft_periods, fft_magnitudes = YaliTimeService.__extract_periodicity_fft(df)

        # Map FFT periods onto the time axis. BUGFIX: the periods are in
        # seconds but the mapper expects sample counts, so convert using
        # the sampling interval before passing them on.
        cycle_starts, cycle_ends = [], []
        if len(fft_periods) > 0:
            period_points = np.rint(
                np.asarray(fft_periods) / YaliTimeService.SAMPLING_INTERVAL_SECONDS
            ).astype(int)
            cycle_starts, cycle_ends = YaliTimeService.__extract_periodicity_fft_with_time(df, period_points)

        # 2. Autocorrelation analysis.
        autocorr_peaks, autocorr_values = YaliTimeService.__extract_periodicity_autocorr(df)
        cycle_info_from_autocorr = YaliTimeService.__get_cycle_times_from_autocorr(df, autocorr_peaks)

        # 3. Seasonal decomposition.
        decomposition = YaliTimeService.__seasonal_decomposition_analysis(df)

        # 4. Wavelet analysis.
        wavelet_coeffs, wavelet_energies = YaliTimeService.__wavelet_analysis(df)

        # Summary (lazy %-style formatting; identical message text).
        log = YaliTimeService.logger
        log.info("=== 周期性分析结果 ===")
        log.info("FFT检测到的主要周期: %s", fft_periods[:5])
        log.info("自相关检测到的显著延迟: %s", autocorr_peaks[:5])
        log.info("小波分析各尺度能量: %s", wavelet_energies)

        return {
            'df': df,
            'fft_results': (fft_peaks, fft_periods, fft_magnitudes),
            'autocorr_results': (autocorr_peaks, autocorr_values),
            'decomposition': decomposition,
            'wavelet_results': (wavelet_coeffs, wavelet_energies),
            'cycle_times': {
                'fft_cycle_starts': cycle_starts,
                'fft_cycle_ends': cycle_ends,
                'autocorr_cycles': cycle_info_from_autocorr
            }
        }

    @staticmethod
    def visualize_periodicity(analysis_results):
        """Plot the periodicity analysis results to a 2x2 figure.

        Saves ``periodicity_analysis.png`` and shows the figure.

        Args:
            analysis_results: dict as returned by
                :meth:`comprehensive_periodicity_analysis`.
        """
        # Configure a CJK-capable font so Chinese labels render correctly.
        plt.rcParams['font.sans-serif'] = ['SimHei']
        plt.rcParams['axes.unicode_minus'] = False

        fig, axes = plt.subplots(2, 2, figsize=(15, 10))

        df = analysis_results['df']

        # 1. Raw time series.
        axes[0, 0].plot(df['data_time'], df['ja_li'])

        # Mark up to the first five FFT-detected cycle starts, if any.
        if 'cycle_times' in analysis_results and analysis_results['cycle_times']['fft_cycle_starts']:
            cycle_starts = analysis_results['cycle_times']['fft_cycle_starts']
            for start_time in cycle_starts[:5]:
                axes[0, 0].axvline(x=start_time, color='r', linestyle='--', alpha=0.7)

        axes[0, 0].set_title('原始时间序列(红色虚线为周期起始点)')
        axes[0, 0].set_xlabel('时间')
        axes[0, 0].set_ylabel('ja_li')

        # 2. FFT spectrum (first 20 dominant components).
        fft_peaks, fft_periods, fft_magnitudes = analysis_results['fft_results']
        axes[0, 1].plot(fft_periods[:20], fft_magnitudes[:20])
        axes[0, 1].set_title('FFT 频谱分析')
        axes[0, 1].set_xlabel('周期')
        axes[0, 1].set_ylabel('幅度')

        # 3. Autocorrelation values at significant lags.
        autocorr_peaks, autocorr_values = analysis_results['autocorr_results']
        axes[1, 0].plot(autocorr_values[:50])
        axes[1, 0].set_title('自相关函数')
        axes[1, 0].set_xlabel('滞后')
        axes[1, 0].set_ylabel('自相关系数')

        # 4. Trend component from the seasonal decomposition.
        decomposition = analysis_results['decomposition']
        axes[1, 1].plot(decomposition.trend)
        axes[1, 1].set_title('趋势成分')
        axes[1, 1].set_xlabel('时间')
        axes[1, 1].set_ylabel('趋势')

        plt.tight_layout()
        plt.savefig('periodicity_analysis.png', dpi=300, bbox_inches='tight')
        plt.show()
