# D:\project\202509\mediapipe-jump-rope\src\optimized_jump_rope_detector.py

import pandas as pd
import numpy as np
import argparse
from pathlib import Path
import json
from scipy.signal import find_peaks, butter, lfilter
from scipy.ndimage import binary_opening, binary_closing

def find_active_segment_indices_v3(df, fs=30.0, min_len_secs=3.0):
    # (This is the correct, robust active-period detection algorithm.)
    """Locate the longest "active" segment of the recording.

    Strategy: take a motion proxy (pose bbox height, falling back to summed
    ankle-Y frame-to-frame deltas), high-pass filter it to isolate fast
    motion, smooth its absolute value into an energy envelope, then run a
    two-threshold (hysteresis) scan over that envelope.

    Args:
        df: Per-frame DataFrame indexed by frame number.
        fs: Sampling rate in frames per second.
        min_len_secs: Minimum accepted segment length, in seconds.

    Returns:
        (start, end) index labels of the longest qualifying segment, or the
        full index range when nothing qualifies (fail-open behavior).
    """
    high_quantile = 0.80; low_quantile = 0.05
    # Prefer bbox height as the motion proxy; fall back to ankle motion.
    if 'pose_bbox_height' not in df.columns or df['pose_bbox_height'].isnull().all():
        if 'left_ankle_y' not in df.columns or 'right_ankle_y' not in df.columns: return df.index.min(), df.index.max()
        signal_source = pd.Series(np.abs(df['left_ankle_y'].diff()) + np.abs(df['right_ankle_y'].diff()), index=df.index)
    else: signal_source = df['pose_bbox_height']
    def butter_highpass(cutoff, fs, order=5):
        # Design a high-pass Butterworth filter (cutoff normalized to Nyquist).
        b, a = butter(order, cutoff / (0.5 * fs), btype='high', analog=False); return b, a
    def highpass_filter(data, cutoff=2.0, fs=fs, order=5):
        # Causal filtering via lfilter: removes slow drift, keeps fast motion.
        b, a = butter_highpass(cutoff, fs, order=order); return lfilter(b, a, data)
    hp_signal = highpass_filter(signal_source.interpolate().fillna(0), fs=fs)
    # One-second centered rolling mean of |HP signal| = smoothed motion energy.
    energy = pd.Series(hp_signal, index=df.index).abs().rolling(window=int(fs), center=True, min_periods=1).mean()
    valid_energy = energy[energy > 0]
    if len(valid_energy) < 20: return df.index.min(), df.index.max()
    # Hysteresis: enter "active" above the high quantile, leave below the low one.
    high_threshold = valid_energy.quantile(high_quantile); low_threshold = valid_energy.quantile(low_quantile)
    is_active = False; active_segments = []; start_idx = None
    for i, e in energy.items():
        if not is_active and e > high_threshold: is_active = True; start_idx = i
        elif is_active and e < low_threshold:
            is_active = False
            if start_idx is not None:
                # NOTE(review): assumes the index is a contiguous frame counter,
                # so (i - 1 - start_idx) measures the segment length in frames.
                if (i - 1 - start_idx) >= (min_len_secs * fs): active_segments.append((start_idx, i - 1))
                start_idx = None
    # Close out a segment still open at the end of the recording.
    if start_idx is not None and (energy.index[-1] - start_idx) >= (min_len_secs * fs):
        active_segments.append((start_idx, energy.index[-1]))
    if not active_segments: return df.index.min(), df.index.max()
    longest_segment = max(active_segments, key=lambda s: s[1] - s[0])
    return longest_segment[0], longest_segment[1]

def calculate_independent_drifting_rails(data_series, active_start, active_end, params):
    """Build drifting upper/lower tolerance rails for a Y-coordinate signal.

    Learns a "golden" crest/trough level from trimmed peak statistics inside
    the active window, then anchors those levels to a slow rolling center so
    the rails drift with the subject.  A buffer proportional to the golden
    amplitude widens the corridor.

    Args:
        data_series: Y-coordinate Series indexed by frame number.
        active_start, active_end: Inclusive index labels of the active window.
        params: Dict with 'trim_percent', 'smooth_window', 'tolerance_ratio'.

    Returns:
        (upper_rail, lower_rail) Series aligned to data_series.index, or
        (None, None) when the window is too short or too few extrema exist.
    """
    window_data = data_series.loc[active_start:active_end].dropna()
    if len(window_data) < 50:
        return None, None

    # Detect crests and troughs with a prominence tied to signal spread.
    crest_pos, _ = find_peaks(window_data, distance=5, prominence=window_data.std() * 0.2)
    trough_pos, _ = find_peaks(-window_data, distance=5, prominence=window_data.std() * 0.2)
    if len(crest_pos) < 5 or len(trough_pos) < 5:
        return None, None

    crest_vals = window_data.iloc[crest_pos]
    trough_vals = window_data.iloc[trough_pos]

    # Trim outlier extrema, keeping the central 'trim_percent' mass.
    lower_q = (1 - params['trim_percent']) / 2
    upper_q = 1 - lower_q
    elite_crests = crest_vals[(crest_vals >= crest_vals.quantile(lower_q)) & (crest_vals <= crest_vals.quantile(upper_q))]
    elite_troughs = trough_vals[(trough_vals >= trough_vals.quantile(lower_q)) & (trough_vals <= trough_vals.quantile(upper_q))]

    golden_upper_level = elite_crests.median()
    golden_lower_level = elite_troughs.median()
    active_center = window_data.median()

    # Slow rolling mean tracks posture drift; reindex back to the full series.
    drifting_center = (
        data_series.loc[active_start:active_end]
        .rolling(window=params['smooth_window'], center=True, min_periods=1)
        .mean()
        .reindex(data_series.index)
    )

    upper_offset = golden_upper_level - active_center
    lower_offset = golden_lower_level - active_center
    final_upper_rail = drifting_center + upper_offset
    final_lower_rail = drifting_center + lower_offset

    golden_amplitude = golden_upper_level - golden_lower_level
    buffer = (golden_amplitude / 2) * params['tolerance_ratio']
    return final_upper_rail + buffer, final_lower_rail - buffer

def get_ankle_amplitudes_and_threshold(data_series, active_start, active_end, params):
    """Compute per-half-cycle ankle amplitudes and the pass/fail threshold.

    Scans the active window for alternating extrema, learns the "golden"
    half-period from trimmed duration statistics, filters out half-cycles
    slower than a dynamic duration cap, and derives a minimum-amplitude
    threshold from the trimmed-median amplitude.

    Args:
        data_series: Ankle-Y Series indexed by frame number.
        active_start, active_end: Inclusive index labels of the active window.
        params: Dict with 'trim_percent' and 'min_amplitude_ratio'.

    Returns:
        tuple: (amplitudes Series indexed by midpoint frame,
        min_amplitude_threshold, golden_amplitude, golden_duration).
        BUGFIX: every failure path now returns a 4-tuple of Nones — the
        original returned only (None, None), which crashed callers that
        unpack four values.
    """
    failure = (None, None, None, None)

    active_data = data_series.loc[active_start:active_end].dropna()
    if len(active_data) < 50:
        return failure
    peaks_idx, _ = find_peaks(active_data, distance=5, prominence=active_data.std() * 0.2)
    valleys_idx, _ = find_peaks(-active_data, distance=5, prominence=active_data.std() * 0.2)
    if len(peaks_idx) < 5 or len(valleys_idx) < 5:
        return failure
    all_extrema_idx = sorted(np.concatenate([peaks_idx, valleys_idx]))
    # Index labels (frame numbers) of every extremum, in time order.
    extrema_frames = [active_data.index[i] for i in all_extrema_idx]

    # --- 1. First pass: collect every half-cycle duration ---
    all_durations = [b - a for a, b in zip(extrema_frames, extrema_frames[1:])]
    if not all_durations:
        return failure

    # --- 2. Learn the golden rhythm and derive a dynamic duration cap ---
    durations_series = pd.Series(all_durations)
    # Drop extreme outliers: keep only durations between the 10th/90th quantiles.
    q10 = durations_series.quantile(0.10)
    q90 = durations_series.quantile(0.90)
    elite_durations = durations_series[(durations_series >= q10) & (durations_series <= q90)]
    # Fall back to the plain mean when trimming removed everything.
    golden_duration = durations_series.mean() if elite_durations.empty else elite_durations.mean()

    # Tolerance factor 2x, with a hard 30-frame cap to guard against extremes.
    max_jump_duration_frames = min(int(golden_duration * 2.0), 30)
    print(f"信息: {data_series.name} - 黄金跳跃半周期: {golden_duration:.2f} 帧, 动态时长上限: {max_jump_duration_frames} 帧")

    # --- 3. Second pass: amplitudes, filtered by the dynamic duration cap ---
    amplitudes = []
    amplitude_frames = []
    for frame1, frame2 in zip(extrema_frames, extrema_frames[1:]):
        if frame2 - frame1 > max_jump_duration_frames:
            continue  # too slow to be one jump half-cycle — skip
        amplitudes.append(abs(data_series[frame2] - data_series[frame1]))
        amplitude_frames.append(int((frame1 + frame2) / 2))

    if not amplitudes:
        return failure
    amplitudes = pd.Series(amplitudes, index=amplitude_frames)

    # Trimmed-median amplitude defines the "golden" jump height; the pass
    # threshold is a configurable fraction of it.
    trim_percent = params['trim_percent']
    lower_q = (1 - trim_percent) / 2
    upper_q = 1 - lower_q
    elite_amplitudes = amplitudes[(amplitudes >= amplitudes.quantile(lower_q)) & (amplitudes <= amplitudes.quantile(upper_q))]
    golden_amplitude = elite_amplitudes.median()
    min_amplitude_threshold = golden_amplitude * params['min_amplitude_ratio']
    return amplitudes, min_amplitude_threshold, golden_amplitude, golden_duration

def morphological_post_processing(is_normal_series, open_size=3, close_size=11):
    """Clean a boolean mask with a morphological open-then-close pass.

    Opening suppresses isolated True blips shorter than `open_size`; the
    subsequent closing fills False gaps shorter than `close_size`.

    Args:
        is_normal_series: Boolean Series to clean.
        open_size: Structuring-element length for the opening step.
        close_size: Structuring-element length for the closing step.

    Returns:
        Boolean Series aligned to the input's index.
    """
    raw_mask = is_normal_series.values
    opened_mask = binary_opening(raw_mask, structure=np.ones(open_size))
    cleaned_mask = binary_closing(opened_mask, structure=np.ones(close_size))
    return pd.Series(cleaned_mask, index=is_normal_series.index)

def validate_signal_rhythm(data_series, golden_duration, tolerance=0.50, fs=30.0):
    """
    [V4 - peak/valley half-period version]
    Validates rhythm from alternating peaks and valleys, fully decoupling
    amplitude from period: each adjacent extremum pair (peak->valley or
    valley->peak) yields one half-period interval, which is compared against
    the learned golden half-period.

    Args:
        data_series: Y-coordinate Series indexed by frame number.
        golden_duration: Learned golden half-period, in frames.
        tolerance: Fractional tolerance around the golden half-period.
        fs: Sampling rate.  NOTE(review): currently unused in the body.

    Returns:
        Boolean Series over data_series.index; True where the local rhythm
        quality score exceeds 60%.  Degenerate inputs return all-True
        (fail-open so this check never vetoes on missing data).
    """
    if data_series.isnull().all() or len(data_series.dropna()) < 20:
        return pd.Series(True, index=data_series.index)

    # Very sensitive parameters so every plausible peak and valley is found.
    peaks_idx, _ = find_peaks(data_series, distance=3, prominence=data_series.std() * 0.05)
    valleys_idx, _ = find_peaks(-data_series, distance=3, prominence=data_series.std() * 0.05)
    
    # Merge and sort all extrema into one time-ordered sequence.
    all_extrema_idx = sorted(np.concatenate([peaks_idx, valleys_idx]))
    
    if len(all_extrema_idx) < 3:
        return pd.Series(True, index=data_series.index)

    extrema_frames = data_series.index[all_extrema_idx]
    
    # Core: the gap between adjacent extrema (peak-valley or valley-peak)
    # is half a period.
    half_period_intervals = np.diff(extrema_frames)
    interval_midpoints = extrema_frames[:-1] + half_period_intervals / 2

    # Compare half-periods, not full periods.
    lower_bound = golden_duration * (1 - tolerance)
    upper_bound = golden_duration * (1 + tolerance)
    is_rhythmic_interval = (half_period_intervals >= lower_bound) & (half_period_intervals <= upper_bound)
    
    # NOTE(review): reindex below raises if interval_midpoints contains
    # duplicates — possible when two extrema share a midpoint; confirm.
    rhythm_quality = pd.Series(is_rhythmic_interval.astype(int), index=interval_midpoints)
    
    # Window of 5 half-periods = stability over 2.5 full cycles.
    window_size = 5 
    local_quality_score = rhythm_quality.rolling(window=window_size, center=True, min_periods=1).mean()
    
    # Spread interval-level scores to every frame via nearest-fill.
    expanded_score = local_quality_score.reindex(data_series.index).bfill().ffill()

    rhythm_ok = expanded_score > 0.6 # acceptance rate threshold, set a bit high (60%)
    
    return rhythm_ok

class UltimateJumpRopeDetector:
    """Jump-rope counter pipeline.

    Combines per-part position rails, ankle-amplitude checks and
    wrist-rhythm validation (each morphologically cleaned) to isolate
    "clean normal" segments, then counts ankle minima inside them.
    """

    def __init__(self, params):
        # params: nested dict of tuning knobs keyed by body-part type
        # ('default', 'wrist', 'ankle', 'com', 'post_processing') — see main().
        self.params = params

    def analyze(self, csv_path):
        """Run the full pipeline on a pose-landmark CSV and count jumps.

        Args:
            csv_path: Path to a CSV with per-frame columns: frame_number,
                has_pose, ankle/wrist Y coordinates, center_of_mass_y, and
                optionally timestamp.

        Returns:
            dict with detected_jumps, detection_method, active_segment,
            normal_segments and jump_frames (empty result if <100 pose frames).
        """
        golden_durations = {}
        print(f"正在使用终极算法分析: {csv_path}")
        df = pd.read_csv(csv_path)
        # Estimate frame rate from timestamps; fall back to 30 fps.
        # NOTE(review): bare except also hides unrelated errors — confirm intent.
        try: fs = 1 / (df['timestamp'].diff().mean())
        except: fs = 30.0
        
        # Only frames with a detected pose take part in the analysis.
        valid_frames = df[df['has_pose'] == True].set_index('frame_number')
        if len(valid_frames) < 100: return {'detected_jumps': 0, 'jump_frames': []}
        
        # 1. Identify the overall active period
        # active_start, active_end = find_active_segment_indices_v3(valid_frames, fs=fs)
        # active_data = valid_frames.loc[active_start:active_end]
        # print(f"自动识别的总活跃期: 帧 {active_start} 到 {active_end}")
        active_start = valid_frames.index.min()
        active_end = valid_frames.index.max()
        active_data = valid_frames # active_data is now simply all valid frames
        print(f"模式更新: 使用全视频范围进行分析: 帧 {active_start} 到 {active_end}")

        # 2. Compute every independent validation criterion
        is_normal_df = pd.DataFrame(index=active_data.index, dtype=bool)
        
        parts_to_validate = {
            'com_y_normal': {'col': 'center_of_mass_y', 'type': 'com'},
            'lwrist_y_normal': {'col': 'left_wrist_y', 'type': 'wrist'},
            'rwrist_y_normal': {'col': 'right_wrist_y', 'type': 'wrist'},
            'lankle_y_normal': {'col': 'left_ankle_y', 'type': 'ankle'},
            'rankle_y_normal': {'col': 'right_ankle_y', 'type': 'ankle'}
        }
        
        # 2a. Y-coordinate position validation against drifting rails
        for name, props in parts_to_validate.items():
            col_name = props['col']
            if col_name not in active_data.columns: continue
            data_series = active_data[col_name]
            # Part-specific params override the defaults.
            part_params = {**self.params['default'], **self.params[props['type']]}
            up, low = calculate_independent_drifting_rails(data_series, active_start, active_end, part_params)
            if up is not None:
                is_normal_raw = (data_series >= low) & (data_series <= up)
                is_normal_df[name] = morphological_post_processing(is_normal_raw, **self.params['post_processing'])

        # 2b. Ankle-amplitude pass/fail validation
        for ankle_label, col_name in [('左脚踝', 'left_ankle_y'), ('右脚踝', 'right_ankle_y')]:
            if col_name not in active_data.columns: continue
            data_series = active_data[col_name]
            part_params = {**self.params['default'], **self.params['ankle']}
            amplitudes, threshold, _, golden_duration = get_ankle_amplitudes_and_threshold(data_series, active_start, active_end, part_params)
            
            if golden_duration: # <--- new: remember the learned golden rhythm
                golden_durations[col_name] = golden_duration
            is_normal_df[f'{ankle_label} 振幅达标'] = False
            if amplitudes is not None:
                # Core change: bidirectional fill — expand each amplitude midpoint
                # forward and backward in time so a whole jump counts as passing.
                # A total jump duration of ~0.3 s is a fairly robust assumption.
                if golden_duration and golden_duration > 0:
                    # Dynamic half-window length: ideally one golden half-period,
                    # scaled by a 1.2 tolerance factor, clamped to 3..12 frames
                    # for robustness.
                    half_limit = int(np.clip(golden_duration * 1.2, 3, 12))
                else:
                    # No rhythm learned: use a safe fallback value.
                    half_limit = 5 # roughly half of 0.33 s @ 30 fps
                
                print(f"信息: {col_name} - 动态填充半周期: {half_limit} 帧")

                amp_ok = (amplitudes >= threshold).reindex(active_data.index) \
                                                  .bfill(limit=half_limit) \
                                                  .ffill(limit=half_limit) \
                                                  .fillna(False)

                is_normal_df[f'{ankle_label} 振幅达标'] = morphological_post_processing(amp_ok, **self.params['post_processing'])

        # 3. Global final decision
        is_normal_df.fillna(True, inplace=True)

        # --- Core change: combine ankle-amplitude columns with OR, not AND ---
        # Check the columns exist, just in case.
        left_amp_col = '左脚踝 振幅达标'
        right_amp_col = '右脚踝 振幅达标'
        if left_amp_col in is_normal_df.columns and right_amp_col in is_normal_df.columns:
            # New column meaning "either ankle's amplitude passes".
            is_normal_df['any_ankle_amp_ok'] = is_normal_df[left_amp_col] | is_normal_df[right_amp_col]
            # Drop the two originals so they don't take part in .all() below.
            is_normal_df.drop(columns=[left_amp_col, right_amp_col], inplace=True)
        # --- end of change ---
        if golden_durations:
            # Use the most stable (shortest learned) rhythm as the reference.
            base_duration = min(golden_durations.values())
            active_data = valid_frames # make sure active_data is in scope here
            
            for wrist_label, col_name in [('左手腕节律', 'left_wrist_y'), ('右手腕节律', 'right_wrist_y')]:
                if col_name not in active_data.columns: continue
                data_series = active_data[col_name]
                
                # Validate wrist rhythm against the ankle-derived base rhythm.
                rhythm_ok = validate_signal_rhythm(data_series, golden_duration=base_duration, tolerance=0.50, fs=fs)
                
                # Morphological clean-up, then join the final decision.
                is_normal_df[wrist_label] = morphological_post_processing(rhythm_ok, open_size=3, close_size=7)
                
        final_normal_mask = is_normal_df.all(axis=1)
        
        # 4. Nominate and count jumps inside the final "clean normal" zones
        all_jumps = []
        normal_segments = []
        start_idx = None
        # Minimum segment length: ~1.5 full jump cycles, floor of 5 frames.
        if golden_durations:
            base_duration = min(golden_durations.values())
            min_segment_len = max(5, int((base_duration * 2) * 1.5))
        else:
            min_segment_len = 10
        # --- end of change ---

        for frame, normal in final_normal_mask.loc[active_start:active_end].items():
            if normal and start_idx is None: start_idx = frame
            elif not normal and start_idx is not None:
                if frame - 1 - start_idx > min_segment_len: # <--- dynamic value
                    normal_segments.append((start_idx, frame - 1))
                start_idx = None
        if start_idx is not None and (final_normal_mask.index[-1] - start_idx) > min_segment_len: # <--- dynamic value
            normal_segments.append((start_idx, final_normal_mask.index[-1]))
        
        print(f"在活跃期内识别出 {len(normal_segments)} 个最终正常区: {normal_segments}")

        for seg_start, seg_end in normal_segments:
            seg_data = valid_frames.loc[seg_start:seg_end]
            if len(seg_data) < 5: continue
            l_ankle = seg_data['left_ankle_y']; r_ankle = seg_data['right_ankle_y']
            # Fetch the stored golden rhythm; fall back to a safe default (5).
            l_g_dur = golden_durations.get('left_ankle_y', 5) 
            r_g_dur = golden_durations.get('right_ankle_y', 5)

            # Dynamic peak distance (~0.7 of a full cycle), floor of 3 frames
            # to avoid oversensitivity.
            l_dist = max(3, int((l_g_dur * 2) * 0.7))
            r_dist = max(3, int((r_g_dur * 2) * 0.7))
            
            # NOTE(review): computed but unused — prominence below is hard-coded
            # to 0.35 * std; confirm which value was intended.
            peak_prom = l_ankle.std() * 0.20

            print(f"信息: left_ankle 动态 distance={l_dist}, right_ankle 动态 distance={r_dist}")
            # Jumps are detected as minima of ankle Y (presumably image
            # coordinates grow downward — confirm), hence find_peaks(-y).
            l_peaks, _ = find_peaks(-l_ankle, distance=l_dist, prominence=l_ankle.std() * 0.35)
            r_peaks, _ = find_peaks(-r_ankle, distance=r_dist, prominence=r_ankle.std() * 0.35)
            
            seg_jumps = sorted(list(set(np.concatenate([l_ankle.index[l_peaks], r_ankle.index[r_peaks]]))))
            if not seg_jumps: continue
                
            # Merge left/right candidates that land within 3 frames of each other.
            merged_jumps = [seg_jumps[0]]
            for i in range(1, len(seg_jumps)):
                if seg_jumps[i] - merged_jumps[-1] > 3:
                    merged_jumps.append(seg_jumps[i])
            all_jumps.extend(merged_jumps)

        total_jumps = len(all_jumps)
        print(f"\n分析完成。最终检测到总跳绳次数: {total_jumps}")
        
        jump_frames_output = [{"jump_number": i + 1, "frame": int(frame)} for i, frame in enumerate(all_jumps)]
        
        return {
            'detected_jumps': total_jumps,
            'detection_method': 'Ultimate Segment-Adaptive V_Final',
            'active_segment': [int(active_start), int(active_end)],
            'normal_segments': [[int(s), int(e)] for s, e in normal_segments],
            'jump_frames': jump_frames_output
        }

def main():
    """Command-line entry point: run the detector on a CSV, write result JSON."""
    cli = argparse.ArgumentParser(description='终极跳绳检测算法')
    cli.add_argument('--csv', required=True, help='CSV文件路径')
    cli.add_argument('--output', help='输出JSON文件的路径')
    opts = cli.parse_args()

    # Per-body-part tuning knobs consumed by the detector.
    detector_params = {
        'default': {'trim_percent': 0.8,'smooth_window': 31},
        'wrist': {'tolerance_ratio': 2.0},
        'ankle': {'tolerance_ratio': 2.0, 'min_amplitude_ratio': 0.5},
        'com': {'tolerance_ratio': 2},
        'post_processing': {'open_size': 5, 'close_size': 11}
    }

    result = UltimateJumpRopeDetector(detector_params).analyze(opts.csv)

    # Default output path sits next to the input CSV.
    source_path = Path(opts.csv)
    output_path = Path(opts.output) if opts.output else source_path.parent / f"{source_path.stem}_ultimate_result.json"

    with open(output_path, 'w', encoding='utf-8') as f:
        json.dump(result, f, ensure_ascii=False, indent=2)

    print(f"\n结果已保存到: {output_path}")
    print(f"最终检测结果: {result.get('detected_jumps', 0)} 次跳绳")

# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()