import 'dart:io';

import 'package:audio_session/audio_session.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:flutter_sound/public/flutter_sound_recorder.dart';
import 'package:flutter_sound_platform_interface/flutter_sound_recorder_platform_interface.dart';

import '../../../../util/DateUtil.dart';
import '../../../../util/FileUtil.dart';
import '../../../../util/ImSingleton.dart';
import '../../../../util/LoggerUtil.dart';

/// Wraps [FlutterSoundRecorder] to record microphone audio to a per-user
/// WAV file, tracking total recorded duration across pause/resume cycles.
class AudioRecordService {
  final _logger = createLogger();

  final theSource = AudioSource.microphone;

  // Path of the output WAV file, relative to the app root directory.
  String tmpFile = '';
  // Instant the current (un-paused) recording segment started;
  // null whenever recording is not actively running.
  DateTime? recordStartDate;
  // Accumulated recorded duration in milliseconds across all segments.
  int time = 0;
  FlutterSoundRecorder? mRecorder;

  AudioRecordService() {
    var userId = ImSingleton.getInstance().session.userId;
    tmpFile = "/$userId/capture/audio/${DateUtil.formatSimpleDate(DateTime.now())}.wav";
    var rootDir = ImSingleton.getInstance().rootDir;
    var tmpDir = "$rootDir/$userId/capture/audio/";
    // Ensure the capture directory exists before the recorder writes to it.
    FileUtil.mkDirs(tmpDir);
  }

  /// Starts a new recording session.
  ///
  /// Tears down any previous recorder, configures the audio session for
  /// voice capture, and begins writing PCM16 WAV to [tmpFile].
  Future<void> startRecord() async {
    try {
      // Dispose any recorder left over from a previous session.
      if (mRecorder != null) {
        await mRecorder!.closeRecorder();
        mRecorder = null;
      }
      Codec codec = Codec.pcm16WAV;
      mRecorder = FlutterSoundRecorder();
      await mRecorder!.openRecorder();
      // FIX: renamed misleading `isAac` — this checks PCM16/WAV support.
      var isCodecSupported = await mRecorder!.isEncoderSupported(codec);
      _logger.i('[audio record start, $isCodecSupported]');

      final session = await AudioSession.instance;
      await session.configure(AudioSessionConfiguration(
        avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
        avAudioSessionCategoryOptions:
        AVAudioSessionCategoryOptions.allowBluetooth |
        AVAudioSessionCategoryOptions.defaultToSpeaker,
        avAudioSessionMode: AVAudioSessionMode.spokenAudio,
        avAudioSessionRouteSharingPolicy:
        AVAudioSessionRouteSharingPolicy.defaultPolicy,
        avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
        androidAudioAttributes: const AndroidAudioAttributes(
          contentType: AndroidAudioContentType.speech,
          flags: AndroidAudioFlags.none,
          usage: AndroidAudioUsage.voiceCommunication,
        ),
        androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
        androidWillPauseWhenDucked: true,
      ));

      recordStartDate = DateTime.now();

      String rootDir = ImSingleton.getInstance().rootDir;
      var filePath = rootDir + tmpFile;

      // FIX: await the future so startup errors are caught by this try block
      // instead of silently dropped.
      await mRecorder!.startRecorder(
        toFile: filePath,
        codec: codec,
        audioSource: theSource,
        sampleRate: 48000,
        bitRate: 100000,
        numChannels: 2,
      );
    } catch (error, stackTrace) {
      _logger.e('[audio record start error]', error: error, stackTrace: stackTrace);
    }
  }

  /// Pauses recording; only valid while the recorder is in the
  /// `recording` state.
  Future<void> pauseRecord() async {
    if (mRecorder != null && mRecorder!.isRecording) {
      if (recordStartDate != null) {
        // FIX: `.millisecond` is only the 0-999 component of a DateTime;
        // use difference() for the real elapsed milliseconds.
        time += DateTime.now().difference(recordStartDate!).inMilliseconds;
        // FIX: clear the segment start so stopRecord()/calcAudioTime()
        // cannot count this segment a second time while paused.
        recordStartDate = null;
      }
      await mRecorder!.pauseRecorder();
    }
  }

  /// Resumes a paused recording; only valid while the recorder is in the
  /// `paused` state.
  Future<void> resumeRecord() async {
    if (mRecorder != null && mRecorder!.isPaused) {
      // Begin a new timed segment.
      recordStartDate = DateTime.now();
      await mRecorder!.resumeRecorder();
    }
  }

  /// Stops and disposes the recorder, folding the final segment into [time].
  Future<void> stopRecord() async {
    if (mRecorder != null) {
      if (recordStartDate != null) {
        // FIX: real elapsed time, not the millisecond-component delta.
        time += DateTime.now().difference(recordStartDate!).inMilliseconds;
      }
      recordStartDate = null;

      // FIX: stop first so the WAV header/data are finalized on disk,
      // then release the recorder and drop the reference.
      await mRecorder!.stopRecorder();
      await mRecorder!.closeRecorder();
      mRecorder = null;
    }
  }

  /// Returns the recording's path relative to the app root directory.
  String getAudioFile() {
    return tmpFile;
  }

  /// Returns the accumulated recorded duration in milliseconds
  /// (completed segments only).
  int getAudioTime() {
    return time;
  }

  /// Returns the total recorded duration in milliseconds, including the
  /// currently running segment if recording is in progress.
  int calcAudioTime() {
    int realTime = 0;
    if (recordStartDate != null) {
      // FIX: real elapsed time, not the millisecond-component delta.
      realTime = DateTime.now().difference(recordStartDate!).inMilliseconds;
    }
    return time + realTime;
  }

  /// Stops recording and deletes the partially written file, if any.
  Future<void> cancel() async {
    await stopRecord();
    // FIX: tmpFile is a non-nullable String — the old `== null` check was
    // dead code; an empty path is the actual "nothing recorded" state.
    if (tmpFile.isEmpty) return;
    try {
      String rootDir = ImSingleton.getInstance().rootDir;
      var filePath = rootDir + tmpFile;
      var file = File(filePath);
      // Avoid throwing when the recorder never created the file.
      if (file.existsSync()) {
        file.deleteSync();
      }
    } catch (error, stackTrace) {
      _logger.e('[audio record remove tmp file]', error: error, stackTrace: stackTrace);
    }
  }
}
