import 'dart:convert';

import 'package:bxultimate/app/router/router_name.dart';
import 'package:bxultimate/data/request/organization_request.dart';
import 'package:bxultimate/data/store/auth_store.dart';
import 'package:bxultimate/data/store/im_store.dart';
import 'package:bxultimate/shared/shared.dart';
import 'package:bxultimate/shared/util/util_mqtt.dart';
import 'package:bxultimate/widgets/webrtc/rtc_util.dart';
import 'package:flutter_smart_dialog/flutter_smart_dialog.dart';
import 'package:get/get.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'state.dart';

import 'dart:async';
import 'dart:io';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/material.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';

/// Controller for the organization chat page.
///
/// Implements a push-to-talk voice flow: records microphone PCM audio to a
/// stream, mirrors each buffer into a temp file, and publishes the chunks
/// over MQTT via [ImStore]. Also handles navigation to the organization
/// information page and uploading the cached device position.
class OrganizationChatController extends GetxController {
  final OrganizationChatState state = OrganizationChatState();

  FlutterSoundPlayer? _mPlayer = FlutterSoundPlayer();
  FlutterSoundRecorder? _mRecorder = FlutterSoundRecorder();
  bool _mPlayerIsInited = false;
  bool _mRecorderIsInited = false;
  bool _mplaybackReady = false;
  String? _mPath;
  StreamSubscription? _mRecordingDataSubscription;
  // Sink of the temp file currently being written; closed in [stopRecorder].
  IOSink? _recordingFileSink;
  int tSampleRate = 48000;

  var page = 1;
  var pageSize = 10;

  @override
  void onInit() {
    super.onInit();

    _mPlayer!.openPlayer().then((value) {
      _mPlayerIsInited = true;
    });
    _openRecorder();
  }

  @override
  void onClose() {
    // Release native audio resources; without this the recorder/player
    // sessions and the stream subscription leak when the controller is
    // disposed.
    stopRecorder();
    _mRecorder?.closeRecorder();
    _mRecorder = null;
    _mPlayer?.closePlayer();
    _mPlayer = null;
    super.onClose();
  }

  /// Requests microphone permission, opens the recorder, and configures the
  /// audio session for two-way voice communication.
  ///
  /// Throws [RecordingPermissionException] if the user denies the
  /// microphone permission.
  Future<void> _openRecorder() async {
    var status = await Permission.microphone.request();
    if (status != PermissionStatus.granted) {
      throw RecordingPermissionException('Microphone permission not granted');
    }
    await _mRecorder!.openRecorder();

    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration(
      avAudioSessionCategory: AVAudioSessionCategory.playAndRecord,
      avAudioSessionCategoryOptions:
          AVAudioSessionCategoryOptions.allowBluetooth |
              AVAudioSessionCategoryOptions.defaultToSpeaker,
      avAudioSessionMode: AVAudioSessionMode.spokenAudio,
      avAudioSessionRouteSharingPolicy:
          AVAudioSessionRouteSharingPolicy.defaultPolicy,
      avAudioSessionSetActiveOptions: AVAudioSessionSetActiveOptions.none,
      androidAudioAttributes: const AndroidAudioAttributes(
        contentType: AndroidAudioContentType.speech,
        flags: AndroidAudioFlags.none,
        usage: AndroidAudioUsage.voiceCommunication,
      ),
      androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
      androidWillPauseWhenDucked: true,
    ));
    _mRecorderIsInited = true;
  }

  /// Creates a fresh `.pcm` temp file and returns an open write sink.
  ///
  /// The path is remembered in [_mPath]. The caller is responsible for
  /// closing the returned sink (see [stopRecorder]).
  Future<IOSink> createFile() async {
    var tempDir = await getTemporaryDirectory();
    // Use a millisecond epoch timestamp: DateTime.toString() contains
    // spaces and colons, which are invalid in file names on some platforms.
    _mPath = '${tempDir.path}/${DateTime.now().millisecondsSinceEpoch}.pcm';
    var outputFile = File(_mPath!);
    if (outputFile.existsSync()) {
      await outputFile.delete();
    }
    return outputFile.openWrite();
  }

  // ----------------------  Here is the code to record to a Stream ------------

  /// Starts recording 16-bit mono PCM at [tSampleRate].
  ///
  /// Every received buffer is appended to the temp file and published as a
  /// JSON message on the shared MQTT topic, tagged with the sender's id.
  /// Does nothing if the recorder is not initialized or the player is busy.
  Future<void> record() async {
    // Runtime guard instead of assert(): asserts are stripped in release
    // builds, which would let an uninitialized recorder crash natively.
    if (!_mRecorderIsInited || !_mPlayer!.isStopped) {
      return;
    }
    var sink = await createFile();
    _recordingFileSink = sink;
    var recordingDataController = StreamController<Food>();
    _mRecordingDataSubscription =
        recordingDataController.stream.listen((buffer) {
      if (buffer is FoodData) {
        sink.add(buffer.data!);

        // Broadcast the raw PCM chunk so peers on the topic can play it.
        ImStore.getInstance().publishMessage(
            'bdjw-common-topic',
            json.encode({
              'fromId': AuthStore.to.profile.id,
              'data': buffer.data!,
            }));
      }
    });
    await _mRecorder!.startRecorder(
      toStream: recordingDataController.sink,
      codec: Codec.pcm16,
      numChannels: 1,
      sampleRate: tSampleRate,
    );
  }

  // --------------------- (it was very simple, wasn't it ?) -------------------

  /// Stops the recorder, cancels the data subscription, and flushes/closes
  /// the temp file sink so the recording is fully persisted on disk.
  Future<void> stopRecorder() async {
    await _mRecorder?.stopRecorder();
    if (_mRecordingDataSubscription != null) {
      await _mRecordingDataSubscription!.cancel();
      _mRecordingDataSubscription = null;
    }
    // Close the file sink opened by record(); the original code leaked it.
    await _recordingFileSink?.flush();
    await _recordingFileSink?.close();
    _recordingFileSink = null;
    _mplaybackReady = true;
  }

  /// Returns the callback for the record button: [record] when idle,
  /// a stop closure when recording, or `null` when recording is unavailable
  /// (recorder not initialized or player busy).
  getRecorderFn() {
    if (!_mRecorderIsInited || !_mPlayer!.isStopped) {
      return null;
    }
    return _mRecorder!.isStopped
        ? record
        : () {
            stopRecorder();
          };
  }

  /// Navigates to the organization information page, forwarding the routing
  /// arguments received by this page.
  void toOrganizationInformationPage() {
    Get.toNamed(RouterName.organizationInformation, arguments: {
      "orgId": Get.arguments["orgId"],
      "attributionCode": Get.arguments['attributionCode'],
      "areaLsgx": Get.arguments['areaLsgx']
    });
  }

  /// Uploads the cached device position ("lng,lat") to the server and shows
  /// a success notification.
  void updatePosition() async {
    String position =
        UtilSp.getString(AppConstant.DEVICE_POSITION, defValue: '')!;
    // Guard: an empty/malformed cache value would throw RangeError on [1].
    final parts = position.split(',');
    if (parts.length < 2) {
      return;
    }
    await OrganizationRequest.updateUserPosition(parts[0], parts[1]);

    SmartDialog.showNotify(msg: '定位更新成功', notifyType: NotifyType.success);
  }

  /// Push-to-talk pressed: update the button label and start recording.
  void say() async {
    state.sayText = '松开结束';
    record();
  }

  /// Push-to-talk released: restore the button label and stop recording.
  void sayCancel() async {
    state.sayText = '按住说话';
    stopRecorder();
  }
}
