import 'dart:async';
import 'dart:typed_data';

import 'package:fl_channel/fl_channel.dart';
import 'package:flutter/services.dart';

/// Method channel used to drive the native system-recording implementation.
const MethodChannel _channel = MethodChannel('system.recording');

/// A single chunk of captured audio plus metadata derived from it.
class AudioDescription {
  /// Recording time, in milliseconds.
  int milliseconds = 0;

  /// Raw audio bytes as delivered by the native side.
  List<int> originalData = <int>[];

  /// Normalized amplitude frames computed from [originalData].
  List<List<double>> amplitude = <List<double>>[];
}

/// Audio capture source.
///
/// The value's [index] is passed to the platform side by
/// `SystemRecording.initialize`.
enum AudioSource {
  /// External recording (e.g. microphone).
  external,

  /// System (internal) audio recording.
  system
}

/// Callback invoked with the latest decoded [AudioDescription] frame.
typedef SystemRecordingOnChanged = void Function(AudioDescription audio);
/// Callback invoked when the native recording state changes.
typedef SystemRecordingStateOnChanged = void Function(bool isRecording);

/// Singleton facade over the native system-recording plugin.
///
/// Lifecycle: [initialize] → [startRecording]/[stopRecording] (repeatable)
/// → [dispose].
class SystemRecording {
  /// Returns the shared singleton instance.
  factory SystemRecording() => _singleton ??= SystemRecording._();

  SystemRecording._();

  static SystemRecording? _singleton;
  bool _isRecording = false;

  /// Whether the native side last reported an active recording.
  bool get isRecording => _isRecording;

  /// Initializes the foreground task and the recording tooling.
  ///
  /// The event stream is attached before the native `initialize` call so no
  /// early events are dropped. Returns `true` only when both the event
  /// channel and the native initialization succeed.
  Future<bool> initialize({AudioSource source = AudioSource.system}) async {
    _audioDescription = null;
    final flEvent = await FlChannel().initFlEvent();
    flEvent?.listen(_onData, onError: _onError, onDone: _onDone);
    final result = await _channel
        .invokeMethod<bool>('initialize', {'source': source.index});
    return flEvent != null && (result ?? false);
  }

  /// Requests that the app be exempted from battery optimizations.
  Future<bool> requestIgnoreBatteryOptimizations() async {
    final result =
        await _channel.invokeMethod<bool>('requestIgnoreBatteryOptimizations');
    return result ?? false;
  }

  /// Starts recording. Recording can be restarted after [stopRecording].
  Future<bool> startRecording() async {
    final result = await _channel.invokeMethod<bool>('startRecording');
    return result ?? false;
  }

  /// Stops an active recording.
  Future<bool> stopRecording() async {
    final result = await _channel.invokeMethod<bool>('stopRecording');
    return result ?? false;
  }

  /// Fully disposes the recorder and the foreground task.
  ///
  /// The event channel is released after a one-second delay, presumably to
  /// let trailing native events drain — TODO confirm against the native side.
  Future<bool> dispose() async {
    final result = await _channel.invokeMethod<bool>('dispose');
    // Deliberately fire-and-forget; `unawaited` documents the intent and
    // satisfies the `unawaited_futures` lint.
    unawaited(Future.delayed(const Duration(seconds: 1), () {
      FlChannel().disposeFlEvent();
    }));
    return result ?? false;
  }

  void _onError(dynamic error) {
    // A stream error means the recording state can no longer be trusted.
    _isRecording = false;
  }

  void _onDone() {
    _isRecording = false;
  }

  AudioDescription? _audioDescription;
  SystemRecordingOnChanged? _systemRecordingOnChanged;
  SystemRecordingStateOnChanged? _systemRecordingStateOnChanged;

  /// Registers [onChanged] to receive every decoded audio frame.
  void onChanged(SystemRecordingOnChanged onChanged) {
    _systemRecordingOnChanged = onChanged;
  }

  /// Registers [onChanged] to be notified when the recording state flips.
  void onStateChanged(SystemRecordingStateOnChanged onChanged) {
    _systemRecordingStateOnChanged = onChanged;
  }

  /// Dispatches raw events from the native event channel.
  ///
  /// A [Map] payload carries audio data (`byte` + `timeMillis`); a [bool]
  /// payload signals a recording-state change.
  void _onData(dynamic data) {
    if (data is Map) {
      // Create the reusable description lazily, and only when audio
      // actually arrives (not for bool state events).
      final description = _audioDescription ??= AudioDescription();
      // Platform byte arrays typically arrive as Uint8List, which
      // implements List<int> — TODO confirm for fl_channel payloads.
      final byte = data['byte'] as List<int>;
      description
        ..milliseconds = data['timeMillis'] as int
        ..originalData = byte
        ..amplitude = pcmToAmplitude(byte);
      _systemRecordingOnChanged?.call(description);
    } else if (data is bool) {
      _isRecording = data;
      _systemRecordingStateOnChanged?.call(_isRecording);
    }
  }

  /// Converts 16-bit little-endian mono PCM bytes into normalized amplitudes.
  ///
  /// Each frame becomes a single-element list holding `sample / 2^15`,
  /// i.e. a value in `[-1, 1)`. Trailing bytes that do not form a complete
  /// frame are ignored.
  List<List<double>> pcmToAmplitude(List<int> pcmData) {
    const sampleWidth = 2; // Assumes 16-bit samples.
    const channels = 1; // Assumes mono.

    final frameCount = pcmData.length ~/ (sampleWidth * channels);
    // Avoid copying when the input is already a typed byte buffer; honor
    // the view's byte offset so sub-views of a larger buffer decode
    // correctly.
    final bytes = pcmData is Uint8List ? pcmData : Uint8List.fromList(pcmData);
    final pcmFrames = ByteData.view(
        bytes.buffer, bytes.offsetInBytes, frameCount * sampleWidth);

    final amplitudeFrames = <List<double>>[];
    for (var i = 0; i < frameCount; i++) {
      final value = pcmFrames.getInt16(i * sampleWidth, Endian.little);
      amplitudeFrames.add([value / 32768.0]);
    }
    return amplitudeFrames;
  }
}
