import 'dart:io';

import 'package:record/record.dart';

import '../../../../util/DateUtil.dart';
import '../../../../util/FileUtil.dart';
import '../../../../util/ImSingleton.dart';
import '../../../../util/LoggerUtil.dart';

class Audio2RecordService {
  final _logger = createLogger();

  // Path of the recorded audio file, relative to the app root directory.
  String tmpFile = '';

  // Start time of the current (un-paused) recording segment; null while no
  // segment is in progress. Cleared whenever the segment is folded into [time].
  DateTime? recordStartDate;

  // Accumulated recorded duration in milliseconds across pause/resume cycles.
  int time = 0;

  AudioRecorder? mRecorder;

  Audio2RecordService() {
    var userId = ImSingleton.getInstance().session.userId;
    tmpFile =
        "/$userId/capture/audio/${DateUtil.formatSimpleDate(DateTime.now())}.m4a";
    var rootDir = ImSingleton.getInstance().rootDir;
    // Ensure the capture directory exists before any recording starts.
    var tmpDir = "$rootDir/$userId/capture/audio/";
    FileUtil.mkDirs(tmpDir);
  }

  /// Folds the elapsed time of the current segment into [time] and clears
  /// the segment start marker so the same segment is never counted twice.
  void _accumulateElapsed() {
    final start = recordStartDate;
    if (start != null) {
      time += DateTime.now().difference(start).inMilliseconds;
    }
    recordStartDate = null;
  }

  /// Starts a fresh recording, discarding any recorder left over from a
  /// previous session.
  Future<void> startRecord() async {
    try {
      // Drop any stale recorder so we always start from a clean state.
      if (mRecorder != null) {
        await mRecorder!.cancel();
        mRecorder = null;
      }

      mRecorder = AudioRecorder();
      recordStartDate = DateTime.now();
      // A new recording overwrites [tmpFile], so previously accumulated
      // duration no longer applies.
      time = 0;
      String rootDir = ImSingleton.getInstance().rootDir;
      var filePath = rootDir + tmpFile;

      // NOTE(review): if permission is denied, recordStartDate stays set even
      // though nothing records — same as the original behavior; confirm the
      // caller handles this.
      if (await mRecorder!.hasPermission()) {
        await mRecorder!
            .start(const RecordConfig(encoder: AudioEncoder.aacLc), path: filePath);
      }

      _logger.i('[audio record start!]', time: DateTime.now());
    } catch (error, stackTrace) {
      _logger.e('[audio record start error]', error: error, stackTrace: stackTrace);
    }
  }

  /// Pauses the recording; only a started (recording) recorder may pause.
  Future<void> pauseRecord() async {
    if (mRecorder != null && await mRecorder!.isRecording()) {
      // Bank the elapsed segment now and clear the marker, so a later
      // stop/close without an intervening resume does not count it twice.
      _accumulateElapsed();
      _logger.i('[audio record pause!]', time: DateTime.now());
      await mRecorder!.pause();
    }
  }

  /// Resumes the recording; only a paused recorder may resume.
  Future<void> resumeRecord() async {
    if (mRecorder != null && await mRecorder!.isPaused()) {
      // A new segment begins now.
      recordStartDate = DateTime.now();
      _logger.i('[audio record resume!]', time: DateTime.now());
      await mRecorder!.resume();
    }
  }

  /// Stops the recording and finalizes the accumulated duration.
  Future<void> stopRecord() async {
    _logger.i('[audio record stop!]', time: DateTime.now());
    if (mRecorder != null) {
      _accumulateElapsed();
      await mRecorder!.stop();
    }
  }

  /// Stops the recording and releases the underlying recorder resources.
  Future<void> closeRecord() async {
    _logger.i('[audio record close!]', time: DateTime.now());
    final recorder = mRecorder;
    if (recorder != null) {
      _accumulateElapsed();
      await recorder.stop();
      await recorder.dispose();
      // A disposed recorder must not be reused; force startRecord() to
      // allocate a fresh one.
      mRecorder = null;
    }
  }

  /// Relative path of the recorded audio file.
  String getAudioFile() => tmpFile;

  /// Total recorded duration in milliseconds, excluding any segment that is
  /// still in progress.
  int getAudioTime() => time;

  /// Total recorded duration in milliseconds, including the in-progress
  /// segment, if any.
  int calcAudioTime() {
    final start = recordStartDate;
    final running =
        start == null ? 0 : DateTime.now().difference(start).inMilliseconds;
    return time + running;
  }

  /// Stops the recording and removes the temporary audio file.
  Future<void> cancel() async {
    await stopRecord();
    // tmpFile is non-nullable; the meaningful guard is "never initialized".
    if (tmpFile.isEmpty) return;
    try {
      String rootDir = ImSingleton.getInstance().rootDir;
      final file = File(rootDir + tmpFile);
      // Avoid throwing (and logging an error) when nothing was ever written.
      if (file.existsSync()) {
        file.deleteSync();
      }
    } catch (error, stackTrace) {
      _logger.e('[audio record remove tmp file]', error: error, stackTrace: stackTrace);
    }
  }
}
