import 'dart:async';
import 'dart:collection';
import 'dart:typed_data';
import 'dart:core';
import 'package:flutter/foundation.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:flutter_sound_platform_interface/flutter_sound_recorder_platform_interface.dart';

/// Singleton helper that records 16 kHz mono PCM16 audio from the microphone
/// (via flutter_sound) and plays PCM16 audio back through a stream player.
///
/// A WebRTC peer connection is created purely so the platform applies its
/// echo-cancellation / auto-gain processing to the captured track.
///
/// NOTE(review): the misspelled identifier `MircoPhoneOpenOld` is kept
/// unchanged so existing callers continue to compile.
class MircoPhoneOpenOld {
  static MircoPhoneOpenOld? _instance;

  /// Buffered audio chunks; populated and consumed by callers of this class.
  Queue<Uint8List> queue = Queue<Uint8List>();

  MircoPhoneOpenOld._();

  /// Returns the single shared instance, creating it on first use.
  factory MircoPhoneOpenOld() {
    _instance ??= MircoPhoneOpenOld._();
    return _instance!;
  }

  FlutterSoundRecorder? _audioRecorder;
  FlutterSoundPlayer? _audioPlayer;
  StreamController<Uint8List>? recordingDataController;
  MediaStream? _mediaStream;
  // BUGFIX: keep handles so [stopRecording] can release them — the original
  // code leaked the peer connection and never cancelled the subscription.
  RTCPeerConnection? _peerConnection;
  StreamSubscription<Uint8List>? _recordingSubscription;

  /// Feeds one PCM16 [buffer] to the stream player started by [init].
  ///
  /// No-op when the player has not been initialised or [buffer] is empty.
  void audioPlayer(Uint8List buffer) {
    if (_audioPlayer == null || buffer.isEmpty) {
      return;
    }
    try {
      _audioPlayer?.foodSink?.add(FoodData(buffer));
    } catch (e) {
      // BUGFIX: original message wrongly said "Failed to stop recording".
      print('Failed to play audio: $e');
    }
  }

  /// Opens the playback engine once and starts the PCM16 stream player
  /// (mono, 16 kHz). Idempotent: subsequent calls return immediately.
  Future<void> init() async {
    if (_audioPlayer != null) {
      return;
    }
    _audioPlayer = FlutterSoundPlayer();
    // BUGFIX: the original did not await openPlayer() before starting the
    // stream player, racing the engine initialisation.
    await _audioPlayer!.openPlayer();
    await _audioPlayer!.startPlayerFromStream(
        codec: Codec.pcm16, numChannels: 1, sampleRate: 16000);
    /*  if (!WebRTC.initialized) {
      await WebRTC.initialize();
    } */
  }

  /// Creates an audio-only [RTCPeerConnection] and attaches [audioTrack]
  /// from [stream] to it, so platform audio processing (AEC/AGC) applies.
  Future<RTCPeerConnection> _createPeerConnection(
      MediaStreamTrack audioTrack, MediaStream stream) async {
    final Map<String, dynamic> configuration = {
      'iceServers': [
        {'url': 'stun:stun.l.google.com:19302'},
      ],
    };

    final Map<String, dynamic> constraints = {
      'audio': {
        'echoCancellation': true,
        'autoGainControl': true,
      },
      'mandatory': {
        'OfferToReceiveAudio': true,
        'OfferToReceiveVideo': false,
      },
      'optional': [],
    };

    final pc = await createPeerConnection(configuration, constraints);
    await pc.addTrack(audioTrack, stream);
    return pc;
  }

  /// Starts microphone capture; [callback] receives each raw PCM16 chunk
  /// as a [Uint8List]. Any previous session is torn down first.
  Future<void> startRecording(Function callback) async {
    try {
      // BUGFIX: original fired stopRecording() without awaiting it, so the
      // new session could start before the old one was released.
      await stopRecording();
      final stream = await navigator.mediaDevices.getUserMedia({
        'audio': {
          'echoCancellation': true,
          'autoGainControl': true,
        },
        'video': false
      });
      _mediaStream = stream;
      final track = stream.getAudioTracks().first;
      _peerConnection = await _createPeerConnection(track, stream);
      track.enableSpeakerphone(true);
      _audioRecorder = FlutterSoundRecorder();
      recordingDataController = StreamController<Uint8List>();
      // BUGFIX: subscribe BEFORE starting the recorder so no early audio
      // chunks are dropped (original subscribed after startRecorder).
      _recordingSubscription =
          recordingDataController!.stream.listen((data) {
        // Deliver the raw audio data stream to the caller.
        callback(data);
      });
      await _audioRecorder!.openRecorder();
      await _audioRecorder!.startRecorder(
          codec: Codec.pcm16,
          audioSource: AudioSource.microphone,
          numChannels: 1,
          toStream: recordingDataController);
    } catch (e) {
      print('Failed to start recording: $e');
    }
  }

  /// Stops capture and releases the recorder, stream subscription,
  /// controller, peer connection and media stream.
  ///
  /// Safe to call when nothing is recording (all handles are nullable).
  Future<void> stopRecording() async {
    try {
      await _audioRecorder?.stopRecorder();
      await _audioRecorder?.closeRecorder();
      _audioRecorder = null;
      await _recordingSubscription?.cancel();
      _recordingSubscription = null;
      await recordingDataController?.close();
      recordingDataController = null;
      // BUGFIX: the original never closed the peer connection (leak).
      await _peerConnection?.close();
      _peerConnection = null;
      await _mediaStream?.dispose();
      _mediaStream = null;
    } catch (e) {
      print('Failed to stop recording: $e');
    }
  }
}
