import 'dart:io';

import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_easyloading/flutter_easyloading.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:flutter_sound_platform_interface/flutter_sound_recorder_platform_interface.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:path_provider/path_provider.dart';

/// Singleton utility wrapping [FlutterSoundRecorder] / [FlutterSoundPlayer]
/// for simple AAC voice-message recording and playback.
///
/// Obtain via [SjRecordUtil.instance] (or the factory constructor — both
/// return the same lazily-created instance).
class SjRecordUtil {
  factory SjRecordUtil() => _getInstance();
  static SjRecordUtil get instance => _getInstance();
  static SjRecordUtil? _instance;

  SjRecordUtil._internal() {
    // Fire-and-forget: native plugin setup completes asynchronously.
    init();
  }

  static SjRecordUtil _getInstance() {
    _instance ??= SjRecordUtil._internal();
    return _instance!;
  }

  FlutterSoundRecorder? _recorder;
  FlutterSoundPlayer? _player;

  /// Elapsed time of the current / most recent recording, updated by the
  /// recorder's `onProgress` stream (every 100 ms while recording).
  Duration? duration;

  /// Initializes the recorder and the player.
  ///
  /// Safe to call again after [dispose]; [startRecord] and [play] call it
  /// lazily when their plugin instance is null.
  Future<void> init() async {
    _recorder = FlutterSoundRecorder();
    await openTheRecorder();

    _player = FlutterSoundPlayer();
    await _player?.openPlayer();
    await _player?.setSubscriptionDuration(const Duration(milliseconds: 100));
  }

  /// Releases the native resources held by the recorder and the player.
  ///
  /// BUG FIX: the player session was previously leaked — only the recorder
  /// was closed here.
  Future<void> dispose() async {
    await _recorder?.closeRecorder();
    _recorder = null;
    await _player?.closePlayer();
    _player = null;
  }

  /// Requests microphone permission, opens the recorder session and
  /// configures the shared [AudioSession] for voice recording/playback.
  ///
  /// Returns silently (recorder left unopened) if permission is denied.
  Future<void> openTheRecorder() async {
    var status = await Permission.microphone.request();
    if (status != PermissionStatus.granted) {
      return;
    }
    await _recorder?.openRecorder();
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration(
      avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
      avAudioSessionCategoryOptions:
          AVAudioSessionCategoryOptions.allowBluetooth | AVAudioSessionCategoryOptions.defaultToSpeaker,
      avAudioSessionMode: AVAudioSessionMode.spokenAudio,
      avAudioSessionRouteSharingPolicy: AVAudioSessionRouteSharingPolicy.defaultPolicy,
      avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
      androidAudioAttributes: const AndroidAudioAttributes(
        contentType: AndroidAudioContentType.speech,
        flags: AndroidAudioFlags.none,
        usage: AndroidAudioUsage.voiceCommunication,
      ),
      androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
      androidWillPauseWhenDucked: true,
    ));

    _recorder?.dispositionStream()?.listen((event) {
      debugPrint('debug dispositionStream：$event');
    });

    await _recorder?.setSubscriptionDuration(const Duration(milliseconds: 100));
    // Track elapsed recording time so stopRecord can report it.
    _recorder?.onProgress?.listen((e) {
      debugPrint("debug onProgress：${e.decibels} / ${e.duration}");
      duration = e.duration;
    });
  }

  /// Starts recording to a timestamped AAC file in the temp directory.
  ///
  /// If a recording is already in progress it is stopped instead (toggle
  /// semantics). Does nothing when microphone permission is denied.
  Future<void> startRecord() async {
    if (_recorder == null) {
      await init();
    }
    if (_recorder?.recorderState == RecorderState.isRecording) {
      await _recorder?.stopRecorder();
      return;
    }
    var status = await Permission.microphone.request();
    if (status != PermissionStatus.granted) {
      debugPrint("Microphone permission not granted");
      return;
    }
    // BUG FIX: clear the previous recording's duration so a stale value is
    // never reported if onProgress has not fired yet for this recording.
    duration = null;
    final tempDir = await getTemporaryDirectory();
    final path = "${tempDir.path}/${DateTime.now().millisecondsSinceEpoch}.aac";
    // BUG FIX: this future was previously unawaited, so startup errors were
    // silently dropped and callers could observe an inconsistent state.
    await _recorder?.startRecorder(
      toFile: path,
      codec: Codec.aacADTS,
      audioSource: AudioSource.microphone,
    );

    debugPrint("debug recording");
  }

  /// Stops the current recording and invokes [finished] with the file path
  /// and the recording length in whole seconds.
  ///
  /// Shows a toast and does NOT invoke [finished] when the recorder returns
  /// no path, the file is missing, or the recording is under one second.
  Future<void> stopRecord(Function(String path, int duration) finished) async {
    final path = await _recorder?.stopRecorder();
    if (path == null) {
      EasyLoading.showToast('record failed  ${_recorder?.recorderState}');
      return;
    }
    if (!File(path).existsSync()) {
      EasyLoading.showToast('record failed');
      return;
    }
    if (duration != null && duration!.inSeconds < 1) {
      EasyLoading.showToast('recording can\'t be less than 1 second');
      return;
    }
    debugPrint("Stop recording: path = $path，duration = ${duration?.inSeconds}");
    finished(path, duration?.inSeconds ?? 0);
  }

  /// Plays the audio at [url]; stops any playback already in progress first.
  ///
  /// [onFinished] is invoked when playback completes naturally.
  /// NOTE(review): this passes [Codec.mp3] while recordings are produced as
  /// [Codec.aacADTS] — presumably play() is used for remote mp3 URLs; confirm
  /// against callers before unifying the codec.
  Future play(String url, [Function()? onFinished]) async {
    if (_player == null) {
      await init();
    }
    if (_player?.playerState == PlayerState.isPlaying) {
      await stop();
    }
    return _player?.startPlayer(fromURI: url, codec: Codec.mp3, whenFinished: onFinished);
  }

  /// Stops any in-progress playback.
  Future<void> stop() async {
    await _player?.stopPlayer();
  }
}
