import 'dart:async';

import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_chat_ui/pages/onnx/llm_utils/llm_record.dart';
import 'package:flutter_chat_ui/pages/onnx/llm_utils/llm_utils.dart';
import 'package:flutter_chat_ui/pages/onnx/onnx_mask_widgets/agent_mask_view.dart';
import 'package:flutter_chat_ui/pages/onnx/onnx_widgets_notifier_controller.dart';
import 'package:flutter_chat_ui/pages/onnx/onnx_permission/onnx_permission_tools.dart';
import 'package:flutter_chat_ui/utils/x_toast.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:logger/web.dart';
import 'package:record/record.dart';
import 'package:sherpa_onnx/sherpa_onnx.dart';

/// "Hold to talk" voice-input button.
class OnnxVoiceButtonView extends StatefulWidget {
  const OnnxVoiceButtonView({super.key});

  @override
  State<OnnxVoiceButtonView> createState() {
    return _OnnxVoiceButtonViewState();
  }
}

class _OnnxVoiceButtonViewState extends State<OnnxVoiceButtonView> {
  /// Minimum accepted recording duration in milliseconds; shorter
  /// recordings are rejected with a "too short" toast.
  static const int _minRecordDuration = 1000;

  /// Number of audio chunks buffered before being handed to the
  /// recognizer in one batch, reducing per-chunk decoding overhead.
  static const int _audioBatchSize = 3;

  // Core collaborators.
  final _notifireController = OnnxWidgetsNotifireController.to;
  final _llmUtils = LLMUtils();
  final _llmRecord = LLMRecord();

  // Permission state, resolved once in [_initializeAsync].
  bool _hasMic = false;
  bool _hasSpeech = false;
  bool _hasPermissions = false;

  // Recording state.
  XRecordStatus _currentRecordStatus = XRecordStatus.idle;
  StreamSubscription<List<int>>? _audioStreamSub;
  DateTime? _recordStartTime;
  String _lastVoiceText = '';

  // Overlay / gesture UI state.
  OverlayEntry? _overlayEntry;
  bool _isOverlayVisible = false;
  bool _isLongPressTriggered = false;
  Offset _lastPosition = Offset.zero;

  @override
  void initState() {
    super.initState();
    // Fire-and-forget: gesture handlers guard on readiness before use.
    unawaited(_initializeAsync());
  }

  @override
  void dispose() {
    // Release every resource before the widget goes away.
    _stopRecording();
    _audioStreamSub?.cancel();
    _audioStreamSub = null;

    // Remove the overlay without touching a possibly-dead context.
    _safeRemoveOverlay();

    _llmUtils.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return GestureDetector(
      onTapUp: (_) => _handleTapUp(),
      onPanDown: (_) => _handlePanDown(),
      onPanStart: _handlePanStart,
      onPanUpdate: _handlePanUpdate,
      onPanEnd: _handlePanEnd,
      onLongPressStart: (_) => _handleLongPressStart(),
      onLongPressMoveUpdate: _handleLongPressMove,
      onLongPressEnd: _handleLongPressEnd,
      child: Container(
        height: 44,
        decoration: BoxDecoration(
          borderRadius: BorderRadius.circular(6),
          color: Colors.white,
        ),
        child: const Center(
          child: Text(
            '按住说话',
            style: TextStyle(
              color: Color(0xFF666666),
              fontSize: 16,
            ),
          ),
        ),
      ),
    );
  }

  /// Resolves permissions, wires the microphone stream into the speech
  /// recognizer, and waits for the recognizer to finish initializing.
  ///
  /// Errors are logged rather than surfaced; the button simply stays in
  /// its not-ready state and [_isLLMReady] reports the problem on use.
  Future<void> _initializeAsync() async {
    try {
      _hasMic = await OnnxPermissionTools.hasMicrophone();
      _hasSpeech = await OnnxPermissionTools.hasSpeechRecognition();
      _hasPermissions = _hasMic && _hasSpeech;

      // NOTE(review): recognizer/speechStream are force-unwrapped before
      // _waitForLLMInitialization completes — confirm LLMUtils creates
      // them eagerly; a null here is caught and logged below.
      final recognizer = _llmUtils.recognizer!;
      final stream = _llmUtils.speechStream!;
      final audioBatch = <List<int>>[];

      // Buffer incoming PCM chunks and hand them to the recognizer in
      // batches of [_audioBatchSize].
      _audioStreamSub = _llmRecord.audioStream?.listen(
        (data) {
          audioBatch.add(data);
          if (audioBatch.length >= _audioBatchSize) {
            _processAudioBatch(List.from(audioBatch), recognizer, stream);
            audioBatch.clear();
          }
        },
        onError: (e) => Logger().e('音频流错误: $e'),
      );

      // Block until the recognizer reports ready (or times out).
      await _waitForLLMInitialization();
    } catch (e) {
      Logger().e('初始化失败: $e');
    }
  }

  /// Polls [_llmUtils] until it reports initialized, initialization
  /// stops on its own, or a 30-second timeout elapses.
  Future<void> _waitForLLMInitialization() async {
    const timeout = Duration(seconds: 30);
    final startTime = DateTime.now();

    while (!_llmUtils.isInitialized && DateTime.now().difference(startTime) < timeout) {
      if (!_llmUtils.isInitializing) break;
      await Future.delayed(const Duration(milliseconds: 100));
    }
  }

  /// Tap (too-short press) handler: dismisses the overlay and tells the
  /// user the press was too short to record.
  ///
  /// NOTE(review): the 400ms/250ms delays appear to let the overlay
  /// animation settle before the toast — confirm against the mask
  /// view's animation timings.
  void _handleTapUp() async {
    if (!_hasPermissions || !mounted) return;
    await Future.delayed(const Duration(milliseconds: 400));
    if (!mounted) return;
    _removeOverlay();
    await Future.delayed(const Duration(milliseconds: 250));
    if (!mounted) return;
    XToast.showInfo('说话时间太短');
  }

  /// Finger down: haptic feedback, permission/readiness gates, then show
  /// the recording mask and stamp the start time.
  void _handlePanDown() {
    HapticFeedback.heavyImpact();

    if (_isNotAuth()) return;
    if (!_isLLMReady()) return;

    _showOverlay();
    _recordStartTime = DateTime.now();
  }

  /// Pan-driven recording start (used when the long-press recognizer has
  /// not claimed the gesture).
  void _handlePanStart(DragStartDetails details) {
    if (!mounted || !_isOverlayVisible || _isLongPressTriggered) return;
    _startRecording();
  }

  void _handlePanUpdate(DragUpdateDetails details) {
    if (!mounted || _isLongPressTriggered) return;
    _updatePositionAndStatus(details.globalPosition);
  }

  void _handlePanEnd(DragEndDetails details) async {
    if (!mounted || !_hasPermissions || _isLongPressTriggered) return;
    await _finishRecording(_isInRecordingArea(_lastPosition), finishedFun: 'PanEnd');
  }

  /// Long-press recording start; once triggered, pan callbacks defer to
  /// the long-press variants via [_isLongPressTriggered].
  void _handleLongPressStart() {
    if (!mounted || !_hasPermissions || !_isLLMReady()) return;
    _isLongPressTriggered = true;
    _recordStartTime ??= DateTime.now();
    _startRecording();
  }

  void _handleLongPressMove(LongPressMoveUpdateDetails details) {
    if (!mounted || !_isOverlayVisible || !_isLongPressTriggered) return;
    _updatePositionAndStatus(details.globalPosition);
  }

  void _handleLongPressEnd(LongPressEndDetails details) async {
    if (!mounted || !_isLongPressTriggered) return;
    await _finishRecording(_isInRecordingArea(details.globalPosition), finishedFun: 'LongPressEnd');
  }

  /// Starts (resumes) audio capture for the current gesture.
  ///
  /// FIX: the previous implementation also required a non-null
  /// `_recordConfig`, but that field was never assigned after the
  /// migration to [LLMRecord] (its only assignment lived in dead,
  /// commented-out code), so this method always returned early and
  /// recording could never start from here. The stale guard and the
  /// vestigial field have been removed.
  void _startRecording() {
    if (!_isLLMReady()) return;

    _resumeRecording();
  }

  /// Feeds a batch of raw PCM chunks to the recognizer off the gesture
  /// path and pushes the accumulated transcript to the UI.
  void _processAudioBatch(List<List<int>> batch, OnlineRecognizer recognizer, OnlineStream stream) {
    Future.microtask(() {
      try {
        for (final data in batch) {
          // Bytes -> float32 samples expected by sherpa-onnx.
          final samples = _llmUtils.convertBytesToFloat32(Uint8List.fromList(data));
          stream.acceptWaveform(samples: samples, sampleRate: sampleRate);

          // Drain the decoder while it has enough frames.
          while (recognizer.isReady(stream)) {
            recognizer.decode(stream);
          }
        }

        final currentText = recognizer.getResult(stream).text;
        if (currentText.isNotEmpty) {
          // Join previous finalized segments with the in-flight one.
          final displayText = _lastVoiceText.isEmpty ? currentText : '$_lastVoiceText，$currentText';

          // On an endpoint, finalize the current segment and reset the
          // decoder stream for the next utterance.
          if (recognizer.isEndpoint(stream)) {
            recognizer.reset(stream);
            _lastVoiceText = displayText;
          }

          if (mounted) {
            Future.microtask(() => _notifireController.updateRecognizedTextWithTypewriterEffect(displayText));
          }
        }
      } catch (e) {
        Logger().e('音频处理错误: $e');
      }
    });
  }

  /// Pauses the audio stream and the recorder, then resets the decoder
  /// so stale audio does not bleed into the next segment.
  void _pauseRecording() {
    if (!(_audioStreamSub?.isPaused ?? true)) {
      _audioStreamSub?.pause();
    }
    _llmRecord.pauseRecord().then((_) {
      if (_isLLMReady()) {
        _llmUtils.recognizer!.reset(_llmUtils.speechStream!);
      }
    }).catchError((e) {
      Logger().e('暂停录制错误: $e');
      XToast.showError('暂停录制失败');
    });
  }

  /// Resumes the audio stream subscription and the recorder.
  void _resumeRecording() {
    if (_audioStreamSub?.isPaused ?? true) {
      _audioStreamSub?.resume();
    }
    _llmRecord.resumeRecord().catchError((e) {
      Logger().e('恢复录制错误: $e');
      XToast.showError('恢复录制失败');
    });
  }

  /// Stops the recorder. Fire-and-forget: failure is only worth a log.
  void _stopRecording() {
    _llmRecord.stopRecord().catchError((e) {
      Logger().e('停止录制错误: $e');
    });
  }

  /// Transitions the shared record status and mirrors it into pause /
  /// resume actions. No-ops when the status is unchanged or the widget
  /// is no longer mounted.
  void _updateRecordStatus(XRecordStatus status) {
    if (_currentRecordStatus == status || !mounted) return;

    _currentRecordStatus = status;
    _notifireController.updateRecordStatus(status);

    switch (status) {
      case XRecordStatus.resumeRecord:
        _resumeRecording();
        break;
      case XRecordStatus.pausedRecord:
        _pauseRecording();
        break;
      default:
        break;
    }
  }

  /// Shows the full-screen recording mask.
  ///
  /// Insertion is deferred to a microtask with a double mounted/overlay
  /// check so we never insert during a build phase or after disposal.
  void _showOverlay() {
    if (_isOverlayVisible || !mounted) return;

    final overlay = Overlay.maybeOf(context);
    if (overlay == null) return;

    _overlayEntry ??= OverlayEntry(builder: (_) => const OnnxMaskView());

    Future.microtask(() {
      // Re-check: the widget may have unmounted between the microtask
      // being scheduled and run.
      if (mounted && _overlayEntry != null && !_isOverlayVisible) {
        final currentOverlay = Overlay.maybeOf(context);
        if (currentOverlay != null) {
          try {
            currentOverlay.insert(_overlayEntry!);
            _isOverlayVisible = true;
          } catch (e) {
            Logger().e('插入overlay失败: $e');
            _overlayEntry = null;
          }
        }
      }
    });
  }

  /// Removes the mask (if shown) and resets per-gesture state.
  void _removeOverlay() {
    if (!_isOverlayVisible) return;
    _safeRemoveOverlay();
    _resetState();
  }

  /// Removes the overlay defensively, tolerating a disposed widget or a
  /// missing Overlay without crashing.
  void _safeRemoveOverlay() {
    if (_overlayEntry != null && _isOverlayVisible) {
      try {
        // Only touch [context] while still mounted.
        if (mounted) {
          final overlay = Overlay.maybeOf(context);
          if (overlay != null) {
            _overlayEntry?.remove();
          }
        } else {
          // Widget already unmounted: just release the entry.
          _overlayEntry?.dispose();
        }
      } catch (e) {
        // Swallow deliberately — overlay teardown must never crash.
        Logger().w('移除overlay时发生错误: $e');
      } finally {
        _overlayEntry = null;
        _isOverlayVisible = false;
      }
    }
  }

  /// Whether the recognizer is fully initialized; surfaces a toast with
  /// the reason when it is not.
  bool _isLLMReady() {
    if (_llmUtils.isInitializing) {
      XToast.showInfo('语音识别正在初始化中，请稍后再试');
      return false;
    }
    if (!_llmUtils.isInitialized || _llmUtils.recognizer == null || _llmUtils.speechStream == null) {
      XToast.showError('语音识别未就绪');
      return false;
    }
    return true;
  }

  /// Whether [position] lies inside the bottom recording zone (the oval
  /// area of the mask); leaving it pauses / cancels the recording.
  bool _isInRecordingArea(Offset position) {
    return position.dy >= (ScreenUtil().screenHeight - kOnnxVoiceBgOvalHeight);
  }

  /// Tracks the finger and toggles between resume (inside the zone) and
  /// pause (outside, i.e. "slide up to cancel").
  void _updatePositionAndStatus(Offset position) {
    _lastPosition = position;

    final isInRecordingArea = _isInRecordingArea(position);
    final newStatus = isInRecordingArea ? XRecordStatus.resumeRecord : XRecordStatus.pausedRecord;

    if (_currentRecordStatus != newStatus) {
      _updateRecordStatus(newStatus);
    }
  }

  /// Ends the gesture: sends (or rejects) the recording depending on
  /// where the finger lifted and how long it was held, then pauses the
  /// recorder and dismisses the mask.
  ///
  /// [finishedFun] labels which gesture path ended the recording and is
  /// echoed in the toast (debug aid).
  Future<void> _finishRecording(
    bool inRecordingArea, {
    required String finishedFun,
  }) async {
    if (inRecordingArea) {
      final duration = _recordStartTime != null ? DateTime.now().difference(_recordStartTime!) : Duration.zero;

      if (duration.inMilliseconds < _minRecordDuration) {
        if (mounted) XToast.showInfo('说话时间太短');
      } else {
        if (mounted) XToast.showInfo('$finishedFun：语音发送成功');
      }
    } else {
      if (mounted) XToast.showInfo('$finishedFun：录音已取消');
    }

    _pauseRecording();
    // Give the pause a beat before tearing down the mask.
    await Future.delayed(const Duration(milliseconds: 200));
    if (mounted) {
      _removeOverlay();
    }
  }

  /// Clears all per-gesture state after the mask is dismissed.
  void _resetState() {
    _isOverlayVisible = false;
    _isLongPressTriggered = false;
    _recordStartTime = null;
    _lastVoiceText = '';

    // Only touch controllers while the widget is still alive.
    if (mounted) {
      _notifireController.clearRecognizedText();
      _updateRecordStatus(XRecordStatus.idle);
    }
  }

  /// Whether mic / speech permissions are missing (also triggers the
  /// permission prompt dialog inside checkPermissions).
  bool _isNotAuth() {
    return !OnnxPermissionTools.checkPermissions(
      context,
      hasMicrophone: _hasMic,
      hasSpeechRecognition: _hasSpeech,
      appName: "Dify AI Chat",
    );
  }
}
