import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';

import 'package:agora_rtc_engine/src/agora_base.dart';
import 'package:agora_rtc_engine/src/agora_h265_transcoder.dart';
import 'package:agora_rtc_engine/src/agora_log.dart';
import 'package:agora_rtc_engine/src/agora_media_base.dart';
import 'package:agora_rtc_engine/src/agora_media_engine.dart';
import 'package:agora_rtc_engine/src/agora_media_player.dart';
import 'package:agora_rtc_engine/src/agora_media_recorder.dart';
import 'package:agora_rtc_engine/src/agora_music_content_center.dart';
import 'package:agora_rtc_engine/src/agora_rhythm_player.dart';
import 'package:agora_rtc_engine/src/agora_rtc_engine.dart';
import 'package:agora_rtc_engine/src/agora_rtc_engine_ex.dart';
import 'package:agora_rtc_engine/src/agora_rtc_engine_ext.dart';
import 'package:agora_rtc_engine/src/agora_spatial_audio.dart';
import 'package:agora_rtc_engine/src/audio_device_manager.dart';
import 'package:agora_rtc_engine/src/binding/agora_base_event_impl.dart';
import 'package:agora_rtc_engine/src/binding/agora_media_base_event_impl.dart';
import 'package:agora_rtc_engine/src/binding/agora_media_engine_impl.dart';
import 'package:agora_rtc_engine/src/binding/agora_rtc_engine_event_impl.dart';
import 'package:agora_rtc_engine/src/binding/agora_rtc_engine_ex_impl.dart'
    as rtc_engine_ex_binding;
import 'package:agora_rtc_engine/src/binding/agora_rtc_engine_impl.dart'
    as rtc_engine_binding;
import 'package:agora_rtc_engine/src/binding/agora_spatial_audio_impl.dart';
import 'package:agora_rtc_engine/src/binding/call_api_event_handler_buffer_ext.dart';
import 'package:agora_rtc_engine/src/binding/event_handler_param_json.dart';
import 'package:agora_rtc_engine/src/impl/agora_h265_transcoder_impl_override.dart';
import 'package:agora_rtc_engine/src/impl/agora_media_engine_impl_override.dart'
    as media_engine_impl;
import 'package:agora_rtc_engine/src/impl/agora_media_recorder_impl_override.dart'
    as media_recorder_impl;
import 'package:agora_rtc_engine/src/impl/agora_music_content_center_impl_override.dart'
    as mcci;
import 'package:agora_rtc_engine/src/impl/agora_spatial_audio_impl_override.dart'
    as agora_spatial_audio_impl;
import 'package:agora_rtc_engine/src/impl/audio_device_manager_impl.dart'
    as audio_device_manager_impl;
import 'package:agora_rtc_engine/src/impl/media_player_impl.dart'
    as media_player_impl;
import 'package:agora_rtc_engine/src/impl/platform/io/global_video_view_controller_platform_ohos.dart';

import 'package:agora_rtc_engine/src/impl/platform/platform_bindings_provider.dart';
import 'package:async/async.dart' show AsyncMemoizer;
import 'package:flutter/foundation.dart'
    show
        ChangeNotifier,
        debugPrint,
        defaultTargetPlatform,
        kIsWeb,
        visibleForTesting;
import 'package:flutter/services.dart' show MethodCall, MethodChannel;
import 'package:flutter/widgets.dart' show VoidCallback, TargetPlatform;
import 'package:iris_method_channel/iris_method_channel.dart';
import 'package:json_annotation/json_annotation.dart';
import 'package:meta/meta.dart';

import '../binding/agora_media_recorder_ohos_impl.dart';
import 'media_player_ohos_impl.dart';
import 'platform/global_video_view_controller.dart';

// ignore_for_file: public_member_api_docs

class RtcEngineOhosImpl implements RtcEngine, RtcEngineEx {
  RtcEngineOhosImpl._(this.engineMethodChannel) {
    engineMethodChannel.setMethodCallHandler((call) async {
      if (call.method == 'onUserJoined') {
        print("agora_rtc_ng callHandler onUserJoined ${call.arguments.toString()}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onUserJoined?.call(
              rtcConnection,
              int.parse(call.arguments['uid']),
              int.parse(call.arguments['collapse']));
        });
        return true;
      } else if (call.method == 'onUserOffline') {
        print("agora_rtc_ng callHandler onUserOffline ${call.arguments.toString()}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int collapse = int.parse(call.arguments['collapse']);
        UserOfflineReasonType reason = UserOfflineReasonType.userOfflineQuit;
        switch (collapse) {
          case 0:
            reason = UserOfflineReasonType.userOfflineQuit;
            break;
          case 1:
            reason = UserOfflineReasonType.userOfflineDropped;
            break;
          case 2:
            reason = UserOfflineReasonType.userOfflineBecomeAudience;
            break;
        }
        _eventHandler.forEach((element) {
          element.onUserOffline
              ?.call(rtcConnection, int.parse(call.arguments['uid']), reason);
        });
        return true;
      } else if (call.method == 'onJoinChannelSuccess') {
        print("agora_rtc_ng callHandler onJoinChannelSuccess ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['cid'],
            localUid: int.parse(call.arguments['uid']));
        isJoinChannel = true;
        print(
            "agora_rtc_ng _eventHandler onJoinChannelSuccess ${_eventHandler.hashCode}");
        _eventHandler.forEach((element) {
          element.onJoinChannelSuccess
              ?.call(rtcConnection, int.parse(call.arguments['elapsed']));
        });
        return true;
      } else if (call.method == 'onRejoinChannelSuccess') {
        print("agora_rtc_ng callHandler onRejoinChannelSuccess ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['cid'],
            localUid: int.parse(call.arguments['uid']));
        isJoinChannel = true;
        _eventHandler.forEach((element) {
          element.onRejoinChannelSuccess
              ?.call(rtcConnection, int.parse(call.arguments['elapsed']));
        });
        return true;
      } else if (call.method == 'onLeaveChannel') {
        print("agora_rtc_ng callHandler onLeaveChannel ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        isJoinChannel = false;
        _eventHandler.forEach((element) {
          element.onLeaveChannel?.call(rtcConnection, const RtcStats());
        });

        return true;
      } else if (call.method == 'onConnectionLost') {
        print("agora_rtc_ng callHandler onConnectionLost ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onConnectionLost?.call(rtcConnection);
        });

        return true;
      } else if (call.method == 'onNetworkQuality') {
        print("agora_rtc_ng callHandler onNetworkQuality ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int txQuality = int.parse(call.arguments['txQuality']);
        int rxQuality = int.parse(call.arguments['rxQuality']);
        QualityType txQualityType = QualityType.qualityUnknown;
        QualityType rxQualityType = QualityType.qualityUnknown;
        for (var e in QualityType.values) {
          if (e.value() == txQuality) {
            txQualityType = e;
          } else if (e.value() == rxQuality) {
            rxQualityType = e;
          }
        }
        _eventHandler.forEach((element) {
          element.onNetworkQuality?.call(rtcConnection,
              int.parse(call.arguments['uid']), txQualityType, rxQualityType);
        });

        return true;
      } else if (call.method == 'onConnectionStateChanged') {
        print("agora_rtc_ng callHandler onConnectionStateChanged ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int state = int.parse(call.arguments['state']);
        int reason = int.parse(call.arguments['reason']);
        ConnectionStateType stateType =
            ConnectionStateType.connectionStateDisconnected;
        ConnectionChangedReasonType reasonType =
            ConnectionChangedReasonType.connectionChangedConnecting;
        for (var e in ConnectionStateType.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        for (var e in ConnectionChangedReasonType.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onConnectionStateChanged
              ?.call(rtcConnection, stateType, reasonType);
        });

        return true;
      } else if (call.method == 'onRtcStats') {
        print("agora_rtc_ng callHandler onRtcStats ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        // Fix: jsonDecode returns a Map<String, dynamic>, not an RtcStats;
        // the previous implicit downcast from dynamic threw a TypeError at
        // runtime. Deserialize through the generated fromJson factory.
        RtcStats stats = RtcStats.fromJson(
            Map<String, dynamic>.from(jsonDecode(call.arguments['state'])));
        _eventHandler.forEach((element) {
          element.onRtcStats?.call(rtcConnection, stats);
        });

        return true;
      } else if (call.method == 'onRemoteAudioStateChanged') {
        print("agora_rtc_ng callHandler onRemoteAudioStateChanged ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int state = int.parse(call.arguments['state']);
        RemoteAudioState stateType = RemoteAudioState.remoteAudioStateStopped;
        for (var e in RemoteAudioState.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int reason = int.parse(call.arguments['reason']);
        RemoteAudioStateReason reasonType =
            RemoteAudioStateReason.remoteAudioReasonInternal;
        for (var e in RemoteAudioStateReason.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onRemoteAudioStateChanged?.call(
              rtcConnection,
              int.parse(call.arguments['uid']),
              stateType,
              reasonType,
              int.parse(call.arguments['elapsed']));
        });
        return true;
      } else if (call.method == 'onRemoteVideoStateChanged') {
        print("agora_rtc_ng callHandler onRemoteVideoStateChanged ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int state = int.parse(call.arguments['state']);
        RemoteVideoState stateType = RemoteVideoState.remoteVideoStateStopped;
        for (var e in RemoteVideoState.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int reason = int.parse(call.arguments['reason']);
        RemoteVideoStateReason reasonType =
            RemoteVideoStateReason.remoteVideoStateReasonInternal;
        for (var e in RemoteVideoStateReason.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onRemoteVideoStateChanged?.call(
              rtcConnection,
              int.parse(call.arguments['uid']),
              stateType,
              reasonType,
              int.parse(call.arguments['elapsed']));
        });
        return true;
      } else if (call.method == 'onAudioMixingPositionChanged') {
        print(
            "agora_rtc_ng callHandler onAudioMixingPositionChanged ${call.arguments}");
        _eventHandler.forEach((element) {
          element.onAudioMixingPositionChanged
              ?.call(int.parse(call.arguments['position']));
        });
        return true;
      } else if (call.method == 'onAudioMixingStateChanged') {
        print("agora_rtc_ng callHandler onAudioMixingStateChanged ${call.arguments}");
        int state = int.parse(call.arguments['state']);
        AudioMixingStateType stateType =
            AudioMixingStateType.audioMixingStatePlaying;
        for (var e in AudioMixingStateType.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int reason = int.parse(call.arguments['reason']);
        AudioMixingReasonType reasonType =
            AudioMixingReasonType.audioMixingReasonCanNotOpen;
        for (var e in AudioMixingReasonType.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onAudioMixingStateChanged?.call(stateType, reasonType);
        });
        return true;
      } else if (call.method == 'onLocalVideoStateChanged') {
        print("agora_rtc_ng callHandler onLocalVideoStateChanged ${call.arguments}");
        int source = int.parse(call.arguments['source']);
        VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary;
        for (var e in VideoSourceType.values) {
          if (e.value() == source) {
            sourceType = e;
            break;
          }
        }
        int state = int.parse(call.arguments['state']);
        LocalVideoStreamState stateType =
            LocalVideoStreamState.localVideoStreamStateStopped;
        for (var e in LocalVideoStreamState.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int reason = int.parse(call.arguments['reason']);
        LocalVideoStreamReason reasonType =
            LocalVideoStreamReason.localVideoStreamReasonOk;
        for (var e in LocalVideoStreamReason.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onLocalVideoStateChanged
              ?.call(sourceType, stateType, reasonType);
        });
        return true;
      } else if (call.method == 'onLocalAudioStateChanged') {
        print("agora_rtc_ng callHandler onLocalAudioStateChanged ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int state = int.parse(call.arguments['state']);
        LocalAudioStreamState stateType =
            LocalAudioStreamState.localAudioStreamStateStopped;
        for (var e in LocalAudioStreamState.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int error = int.parse(call.arguments['error']);
        LocalAudioStreamReason errorType =
            LocalAudioStreamReason.localAudioStreamReasonOk;
        for (var e in LocalAudioStreamReason.values) {
          if (e.value() == error) {
            errorType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onLocalAudioStateChanged
              ?.call(rtcConnection, stateType, errorType);
        });
        return true;
      } else if (call.method == 'onAudioVolumeIndication') {
        print("agora_rtc_ng callHandler onAudioVolumeIndication ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        // Fix: jsonDecode yields a List<dynamic>, not List<AudioVolumeInfo>;
        // the previous implicit downcast threw a TypeError at runtime. Map
        // each entry through the generated fromJson factory.
        List<AudioVolumeInfo> speakers =
            (jsonDecode(call.arguments['speakers']) as List)
                .map((e) =>
                    AudioVolumeInfo.fromJson(Map<String, dynamic>.from(e)))
                .toList();
        _eventHandler.forEach((element) {
          // Fix: report the actual speaker count instead of a hard-coded 1.
          element.onAudioVolumeIndication?.call(rtcConnection, speakers,
              speakers.length, int.parse(call.arguments['totalVolume']));
        });
        return true;
      } else if (call.method == 'onError') {
        print("agora_rtc_ng callHandler onError ${call.arguments}");
        int error = int.parse(call.arguments['err']);
        ErrorCodeType errorType = ErrorCodeType.errOk;
        for (var e in ErrorCodeType.values) {
          if (e.value() == error) {
            errorType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onError?.call(errorType, call.arguments['message']);
        });
        return true;
      } else if (call.method == 'onSnapshotTaken') {
        // NOTE: a second, byte-identical 'onSnapshotTaken' else-if branch
        // followed this one; it was unreachable dead code and was removed.
        print("agora_rtc_ng callHandler onSnapshotTaken ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onSnapshotTaken?.call(
              rtcConnection,
              int.parse(call.arguments['uid']),
              call.arguments['filePath'],
              int.parse(call.arguments['width']),
              int.parse(call.arguments['height']),
              int.parse(call.arguments['errCode']));
        });
        return true;
      } else if (call.method == 'onVideoSizeChanged') {
        print("agora_rtc_ng callHandler onVideoSizeChanged ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int source = int.parse(call.arguments['source']);
        VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary;
        for (var e in VideoSourceType.values) {
          if (e.value() == source) {
            sourceType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onVideoSizeChanged?.call(
              rtcConnection,
              sourceType,
              int.parse(call.arguments['uid']),
              int.parse(call.arguments['width']),
              int.parse(call.arguments['height']),
              int.parse(call.arguments['rotation']));
        });
        return true;
      } else if (call.method == 'onStreamMessage') {
        print("agora_rtc_ng callHandler onStreamMessage ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        Uint8List bytes = call.arguments['data'];
        _eventHandler.forEach((element) {
          element.onStreamMessage?.call(
              rtcConnection,
              int.parse(call.arguments['uid']),
              int.parse(call.arguments['streamId']),
              bytes,
              bytes.length,
              0);
        });
        return true;
      } else if (call.method == 'onFacePositionChanged') {
        print("agora_rtc_ng callHandler onFacePositionChanged ${call.arguments}");
        // Fix: jsonDecode yields List<dynamic>; the previous implicit
        // downcasts to List<Rectangle> / List<int> threw a TypeError at
        // runtime. Deserialize element-wise instead.
        List<Rectangle> faceRectArr =
            (jsonDecode(call.arguments['faceRectArr']) as List)
                .map((e) => Rectangle.fromJson(Map<String, dynamic>.from(e)))
                .toList();
        List<int> vecDistance =
            List<int>.from(jsonDecode(call.arguments['vecDistance']) as List);
        _eventHandler.forEach((element) {
          element.onFacePositionChanged?.call(
              int.parse(call.arguments['imageWidth']),
              int.parse(call.arguments['imageHeight']),
              faceRectArr,
              vecDistance,
              0);
        });
        return true;
      } else if (call.method == 'onLocalAudioStats') {
        print("agora_rtc_ng callHandler onLocalAudioStats ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        // Fix: assigning the raw jsonDecode map to LocalAudioStats threw a
        // TypeError at runtime; use the generated fromJson factory.
        LocalAudioStats stats = LocalAudioStats.fromJson(
            Map<String, dynamic>.from(jsonDecode(call.arguments['stats'])));
        _eventHandler.forEach((element) {
          element.onLocalAudioStats?.call(rtcConnection, stats);
        });
        return true;
      } else if (call.method == 'onLocalVideoStats') {
        print("agora_rtc_ng callHandler onLocalVideoStats ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        // Fix: assigning the raw jsonDecode map to LocalVideoStats threw a
        // TypeError at runtime; use the generated fromJson factory.
        LocalVideoStats stats = LocalVideoStats.fromJson(
            Map<String, dynamic>.from(jsonDecode(call.arguments['stats'])));
        _eventHandler.forEach((element) {
          element.onLocalVideoStats?.call(rtcConnection, stats);
        });
        return true;
      } else if (call.method == 'onRemoteAudioStats') {
        print("agora_rtc_ng callHandler onRemoteAudioStats ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        // Fix: assigning the raw jsonDecode map to RemoteAudioStats threw a
        // TypeError at runtime; use the generated fromJson factory.
        RemoteAudioStats stats = RemoteAudioStats.fromJson(
            Map<String, dynamic>.from(jsonDecode(call.arguments['stats'])));
        _eventHandler.forEach((element) {
          element.onRemoteAudioStats?.call(rtcConnection, stats);
        });
        return true;
      } else if (call.method == 'onRemoteVideoStats') {
        print("agora_rtc_ng callHandler onRemoteVideoStats ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        // Fix: assigning the raw jsonDecode map to RemoteVideoStats threw a
        // TypeError at runtime; use the generated fromJson factory.
        RemoteVideoStats stats = RemoteVideoStats.fromJson(
            Map<String, dynamic>.from(jsonDecode(call.arguments['stats'])));
        _eventHandler.forEach((element) {
          element.onRemoteVideoStats?.call(rtcConnection, stats);
        });
        return true;
      } else if (call.method == 'onPermissionError') {
        print("agora_rtc_ng callHandler onPermissionError ${call.arguments}");
        int permission = int.parse(call.arguments['permission']);
        PermissionType permissionType = PermissionType.recordAudio;
        for (var e in PermissionType.values) {
          if (e.value() == permission) {
            permissionType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onPermissionError?.call(permissionType);
        });
        return true;
      } else if (call.method == 'onLastmileQuality') {
        print("agora_rtc_ng callHandler onLastmileQuality ${call.arguments}");
        int quality = int.parse(call.arguments['quality']);
        QualityType qualityType = QualityType.qualityUnknown;
        for (var e in QualityType.values) {
          if (e.value() == quality) {
            qualityType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onLastmileQuality?.call(qualityType);
        });
        return true;
      } else if (call.method == 'onLastmileProbeResult') {
        print("agora_rtc_ng callHandler onLastmileProbeResult ${call.arguments}");
        // Fix: the raw jsonDecode map is not a LastmileProbeResult; the
        // previous implicit downcast threw a TypeError at runtime.
        LastmileProbeResult result = LastmileProbeResult.fromJson(
            Map<String, dynamic>.from(jsonDecode(call.arguments['result'])));
        _eventHandler.forEach((element) {
          element.onLastmileProbeResult?.call(result);
        });
        return true;
      } else if (call.method == 'onRtmpStreamingStateChanged') {
        print(
            "agora_rtc_ng callHandler onRtmpStreamingStateChanged ${call.arguments}");
        int state = int.parse(call.arguments['state']);
        RtmpStreamPublishState stateType =
            RtmpStreamPublishState.rtmpStreamPublishStateIdle;
        for (var e in RtmpStreamPublishState.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int reason = int.parse(call.arguments['reason']);
        RtmpStreamPublishReason reasonType =
            RtmpStreamPublishReason.rtmpStreamPublishReasonOk;
        for (var e in RtmpStreamPublishReason.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onRtmpStreamingStateChanged
              ?.call(call.arguments['url'], stateType, reasonType);
        });
        return true;
      } else if (call.method == 'onRtmpStreamingEvent') {
        print("agora_rtc_ng callHandler onRtmpStreamingEvent ${call.arguments}");
        int event = int.parse(call.arguments['event']);
        RtmpStreamingEvent eventType =
            RtmpStreamingEvent.rtmpStreamingEventFailedLoadImage;
        for (var e in RtmpStreamingEvent.values) {
          if (e.value() == event) {
            eventType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onRtmpStreamingEvent?.call(call.arguments['url'], eventType);
        });
        return true;
      } else if (call.method == 'onTranscodingUpdated') {
        print("agora_rtc_ng callHandler onTranscodingUpdated ${call.arguments}");
        _eventHandler.forEach((element) {
          element.onTranscodingUpdated?.call();
        });
        return true;
      } else if (call.method == 'onStreamMessageError') {
        print("agora_rtc_ng callHandler onStreamMessageError ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int error = int.parse(call.arguments['error']);
        ErrorCodeType errorType = ErrorCodeType.errOk;
        for (var e in ErrorCodeType.values) {
          if (e.value() == error) {
            errorType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onStreamMessageError?.call(
              rtcConnection,
              int.parse(call.arguments['uid']),
              int.parse(call.arguments['streamId']),
              errorType,
              int.parse(call.arguments['missed']),
              int.parse(call.arguments['cached']));
        });
        return true;
      } else if (call.method == 'onClientRoleChanged') {
        print("agora_rtc_ng callHandler onClientRoleChanged ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int oldRole = int.parse(call.arguments['oldRole']);
        ClientRoleType oldRoleType = ClientRoleType.clientRoleBroadcaster;
        for (var e in ClientRoleType.values) {
          if (e.value() == oldRole) {
            oldRoleType = e;
            break;
          }
        }
        int newRole = int.parse(call.arguments['newRole']);
        ClientRoleType newRoleType = ClientRoleType.clientRoleBroadcaster;
        for (var e in ClientRoleType.values) {
          if (e.value() == newRole) {
            newRoleType = e;
            break;
          }
        }
        // Fix: assigning the raw jsonDecode map to ClientRoleOptions threw a
        // TypeError at runtime; use the generated fromJson factory.
        ClientRoleOptions options = ClientRoleOptions.fromJson(
            Map<String, dynamic>.from(
                jsonDecode(call.arguments['newRoleOptions'])));
        _eventHandler.forEach((element) {
          element.onClientRoleChanged
              ?.call(rtcConnection, oldRoleType, newRoleType, options);
        });
        return true;
      } else if (call.method == 'onVideoPublishStateChanged') {
        print(
            "agora_rtc_ng callHandler onVideoPublishStateChanged ${call.arguments}");
        int source = int.parse(call.arguments['source']);
        VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary;
        for (var e in VideoSourceType.values) {
          if (e.value() == source) {
            sourceType = e;
            break;
          }
        }
        int oldState = int.parse(call.arguments['oldState']);
        StreamPublishState oldStateType = StreamPublishState.pubStateIdle;
        for (var e in StreamPublishState.values) {
          if (e.value() == oldState) {
            oldStateType = e;
            break;
          }
        }
        int newState = int.parse(call.arguments['newState']);
        StreamPublishState newStateType = StreamPublishState.pubStateIdle;
        for (var e in StreamPublishState.values) {
          if (e.value() == newState) {
            newStateType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onVideoPublishStateChanged?.call(
              sourceType,
              call.arguments['channel'],
              oldStateType,
              newStateType,
              int.parse(call.arguments['elapseSinceLastState']));
        });
        return true;
      } else if (call.method == 'onEncryptionError') {
        print("agora_rtc_ng callHandler onEncryptionError ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int errorType = int.parse(call.arguments['errorType']);
        EncryptionErrorType errorTypeType =
            EncryptionErrorType.encryptionErrorInternalFailure;
        for (var e in EncryptionErrorType.values) {
          if (e.value() == errorType) {
            errorTypeType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onEncryptionError?.call(rtcConnection, errorTypeType);
        });
        return true;
      } else if (call.method == 'onClientRoleChangeFailed') {
        print("agora_rtc_ng callHandler onClientRoleChangeFailed ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        int reason = int.parse(call.arguments['reason']);
        ClientRoleChangeFailedReason reasonType = ClientRoleChangeFailedReason
            .clientRoleChangeFailedTooManyBroadcasters;
        for (var e in ClientRoleChangeFailedReason.values) {
          if (e.value() == reason) {
            reasonType = e;
            break;
          }
        }
        int currentRole = int.parse(call.arguments['currentRole']);
        ClientRoleType currentRoleType = ClientRoleType.clientRoleBroadcaster;
        for (var e in ClientRoleType.values) {
          if (e.value() == currentRole) {
            currentRoleType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onClientRoleChangeFailed
              ?.call(rtcConnection, reasonType, currentRoleType);
        });
        return true;
      } else if (call.method == 'onTokenPrivilegeWillExpire') {
        print(
            "agora_rtc_ng callHandler onTokenPrivilegeWillExpire ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onTokenPrivilegeWillExpire
              ?.call(rtcConnection, call.arguments['token']);
        });
        return true;
      } else if (call.method == 'onAudioPublishStateChanged') {
        print(
            "agora_rtc_ng callHandler onAudioPublishStateChanged ${call.arguments}");
        int oldState = int.parse(call.arguments['oldState']);
        StreamPublishState oldStateType = StreamPublishState.pubStateIdle;
        for (var e in StreamPublishState.values) {
          if (e.value() == oldState) {
            oldStateType = e;
            break;
          }
        }
        int newState = int.parse(call.arguments['newState']);
        StreamPublishState newStateType = StreamPublishState.pubStateIdle;
        for (var e in StreamPublishState.values) {
          if (e.value() == newState) {
            newStateType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onAudioPublishStateChanged?.call(
              call.arguments['channel'],
              oldStateType,
              newStateType,
              int.parse(call.arguments['elapseSinceLastState']));
        });
        return true;
      } else if (call.method == 'onVideoSubscribeStateChanged') {
        print(
            "agora_rtc_ng callHandler onVideoSubscribeStateChanged ${call.arguments}");
        int oldState = int.parse(call.arguments['oldState']);
        StreamSubscribeState oldStateType = StreamSubscribeState.subStateIdle;
        for (var e in StreamSubscribeState.values) {
          if (e.value() == oldState) {
            oldStateType = e;
            break;
          }
        }
        int newState = int.parse(call.arguments['newState']);
        StreamSubscribeState newStateType = StreamSubscribeState.subStateIdle;
        for (var e in StreamSubscribeState.values) {
          if (e.value() == newState) {
            newStateType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onVideoSubscribeStateChanged?.call(
              call.arguments['channel'],
              int.parse(call.arguments['uid']),
              oldStateType,
              newStateType,
              int.parse(call.arguments['elapseSinceLastState']));
        });
        return true;
      } else if (call.method == 'onAudioSubscribeStateChanged') {
        print(
            "agora_rtc_ng callHandler onAudioSubscribeStateChanged ${call.arguments}");
        int oldState = int.parse(call.arguments['oldState']);
        StreamSubscribeState oldStateType = StreamSubscribeState.subStateIdle;
        for (var e in StreamSubscribeState.values) {
          if (e.value() == oldState) {
            oldStateType = e;
            break;
          }
        }
        int newState = int.parse(call.arguments['newState']);
        StreamSubscribeState newStateType = StreamSubscribeState.subStateIdle;
        for (var e in StreamSubscribeState.values) {
          if (e.value() == newState) {
            newStateType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onAudioSubscribeStateChanged?.call(
              call.arguments['channel'],
              int.parse(call.arguments['uid']),
              oldStateType,
              newStateType,
              int.parse(call.arguments['elapseSinceLastState']));
        });
        return true;
      } else if (call.method == 'onChannelMediaRelayStateChanged') {
        print(
            "agora_rtc_ng callHandler onChannelMediaRelayStateChanged ${call.arguments}");
        int state = int.parse(call.arguments['state']);
        ChannelMediaRelayState stateType =
            ChannelMediaRelayState.relayStateIdle;
        for (var e in ChannelMediaRelayState.values) {
          if (e.value() == state) {
            stateType = e;
            break;
          }
        }
        int code = int.parse(call.arguments['code']);
        ChannelMediaRelayError codeType = ChannelMediaRelayError.relayOk;
        for (var e in ChannelMediaRelayError.values) {
          if (e.value() == code) {
            codeType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onChannelMediaRelayStateChanged?.call(stateType, codeType);
        });
        return true;
      } else if (call.method == 'onFirstRemoteVideoFrame') {
        print("agora_rtc_ng callHandler onFirstRemoteVideoFrame ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onFirstRemoteVideoFrame?.call(
            rtcConnection,
            int.parse(call.arguments['uid']),
            int.parse(call.arguments['width']),
            int.parse(call.arguments['height']),
            int.parse(call.arguments['elapsed']),
          );
        });
        return true;
      } else if (call.method == 'onUserMuteAudio') {
        print("agora_rtc_ng callHandler onUserMuteAudio ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onUserMuteAudio?.call(rtcConnection,
              int.parse(call.arguments['uid']), call.arguments['muted']);
        });
        return true;
      } else if (call.method == 'onUserMuteVideo') {
        print("agora_rtc_ng callHandler onUserMuteVideo ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onUserMuteVideo?.call(rtcConnection,
              int.parse(call.arguments['uid']), call.arguments['muted']);
        });
        return true;
      } else if (call.method == 'onFirstLocalVideoFrame') {
        print("agora_rtc_ng callHandler onFirstLocalVideoFrame ${call.arguments}");
        int source = int.parse(call.arguments['source']);
        VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary;
        for (var e in VideoSourceType.values) {
          if (e.value() == source) {
            sourceType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onFirstLocalVideoFrame?.call(
              sourceType,
              int.parse(call.arguments['width']),
              int.parse(call.arguments['height']),
              int.parse(call.arguments['elapsed']));
        });
        return true;
      } else if (call.method == 'onRequestToken') {
        print("agora_rtc_ng callHandler onRequestToken ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onRequestToken?.call(rtcConnection);
        });
        return true;
      } else if (call.method == 'onCameraFocusPointChanged') {
        print("agora_rtc_ng callHandler onCameraFocusPointChanged ${call.arguments}");
        // RtcConnection rtcConnection = RtcConnection(channelId:call.arguments['channelId'],localUid:  int.parse(call.arguments['localUserId']));
        // _eventHandler.onCameraFocusPointChanged?.call(rtcConnection);
        return true;
      } else if (call.method == 'onCameraFocusPointChanged') {
        print("agora_rtc_ng callHandler onCameraFocusPointChanged ${call.arguments}");
        // RtcConnection rtcConnection = RtcConnection(channelId:call.arguments['channelId'],localUid:  int.parse(call.arguments['localUserId']));
        // _eventHandler.onAudioRoutingChanged?.call(rtcConnection);
        return true;
      } else if (call.method == 'onLocalVideoTranscoderError') {
        print(
            "agora_rtc_ng callHandler onLocalVideoTranscoderError ${call.arguments}");
        TranscodingVideoStream stream = call.arguments['stream'];
        int error = int.parse(call.arguments['error']);
        VideoTranscoderError errorType =
            VideoTranscoderError.vtErrVideoSourceNotReady;
        for (var e in VideoTranscoderError.values) {
          if (e.value() == error) {
            errorType = e;
            break;
          }
        }
        _eventHandler.forEach((element) {
          element.onLocalVideoTranscoderError?.call(stream, errorType);
        });
        return true;
      } else if (call.method == 'onAudioEffectFinished') {
        print("agora_rtc_ng callHandler onAudioEffectFinished ${call.arguments}");
        _eventHandler.forEach((element) {
          element.onAudioEffectFinished
              ?.call(int.parse(call.arguments['soundId']));
        });
        return true;
      } else if (call.method == 'onCameraExposureAreaChanged') {
        print(
            "agora_rtc_ng callHandler onCameraExposureAreaChanged ${call.arguments}");
        _eventHandler.forEach((element) {
          element.onCameraExposureAreaChanged?.call(
            int.parse(call.arguments['x']),
            int.parse(call.arguments['y']),
            int.parse(call.arguments['width']),
            int.parse(call.arguments['height']),
          );
        });
        return true;
      } else if (call.method == 'onUserEnableVideo') {
        print("agora_rtc_ng callHandler onUserEnableVideo ${call.arguments}");
        RtcConnection rtcConnection = RtcConnection(
            channelId: call.arguments['channelId'],
            localUid: int.parse(call.arguments['localUserId']));
        _eventHandler.forEach((element) {
          element.onUserEnableVideo?.call(rtcConnection,
              int.parse(call.arguments['uid']), call.arguments['enabled']);
        });
        return true;
      }
      return false;
    });
  }

  // Lazily-created singleton instance; see [create].
  static RtcEngineOhosImpl? _instance;

  // Channel to the native OHOS engine; every API call is forwarded over it.
  late MethodChannel engineMethodChannel;

  // Registered handlers; native events are fanned out to every element.
  List<RtcEngineEventHandler> _eventHandler = [];

  // Whether a channel has been joined; read by getUserInfoByUserAccount.
  // NOTE(review): not set in the visible code — presumably flipped by the
  // join-success event handler elsewhere; confirm.
  bool isJoinChannel = false;

  /// Returns the process-wide engine instance, creating it on first use.
  static RtcEngineOhosImpl create() {
    return _instance ??=
        RtcEngineOhosImpl._(const MethodChannel("agora_rtc_ng/engine"));
  }

  /// Adds a watermark image to the local video stream.
  @override
  Future<void> addVideoWatermark(
      {required String watermarkUrl, required WatermarkOptions options}) async {
    print(
        "agora_rtc_ng flutter addVideoWatermark watermarkUrl:${watermarkUrl}, options: ${options.toJson().toString()}");
    final Map<String, dynamic> args = {
      "watermarkUrl": watermarkUrl,
      "options": jsonEncode(options.toJson()),
    };
    // Fix: await the platform call so failures propagate to the caller.
    await engineMethodChannel.invokeMethod("RtcEngine_addVideoWatermark", args);
  }

  /// Adjusts the playback volume of the current audio mixing.
  @override
  Future<void> adjustAudioMixingPlayoutVolume(int volume) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustAudioMixingPlayoutVolume",
        {"volume": volume.toString()});
  }

  /// Adjusts the publish volume of the current audio mixing.
  @override
  Future<void> adjustAudioMixingPublishVolume(int volume) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustAudioMixingPublishVolume",
        {"volume": volume.toString()});
  }

  /// Adjusts both playback and publish volume of the audio mixing.
  @override
  Future<void> adjustAudioMixingVolume(int volume) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustAudioMixingVolume", {"volume": volume.toString()});
  }

  /// Adjusts the local playout volume of a custom audio track.
  @override
  Future<void> adjustCustomAudioPlayoutVolume(
      {required int trackId, required int volume}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustCustomAudioPlayoutVolume",
        {"volume": volume.toString(), "trackId": trackId.toString()});
  }

  /// Adjusts the publish volume of a custom audio track.
  @override
  Future<void> adjustCustomAudioPublishVolume(
      {required int trackId, required int volume}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustCustomAudioPublishVolume",
        {"volume": volume.toString(), "trackId": trackId.toString()});
  }

  /// Adjusts the loopback recording signal volume.
  @override
  Future<void> adjustLoopbackSignalVolume(int volume) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustLoopbackSignalVolume", {"volume": volume.toString()});
  }

  /// Adjusts the playback volume of all remote users.
  @override
  Future<void> adjustPlaybackSignalVolume(int volume) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustPlaybackSignalVolume", {"volume": volume.toString()});
  }

  /// Adjusts the microphone recording signal volume.
  @override
  Future<void> adjustRecordingSignalVolume(int volume) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustRecordingSignalVolume", {"volume": volume.toString()});
  }

  /// Adjusts the playback volume of one remote user.
  @override
  Future<void> adjustUserPlaybackSignalVolume(
      {required int uid, required int volume}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustUserPlaybackSignalVolume",
        {"volume": volume.toString(), "uid": uid.toString()});
  }

  /// Removes all watermarks from the local video stream.
  @override
  Future<void> clearVideoWatermarks() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_clearVideoWatermarks", {});
  }

  /// Files a call-quality complaint for the call identified by [callId].
  @override
  Future<void> complain(
      {required String callId, required String description}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_complain", {"callId": callId, "description": description});
  }

  /// Configures the virtual metronome (rhythm player).
  @override
  Future<void> configRhythmPlayer(AgoraRhythmPlayerConfig config) async {
    print(
        "agora_rtc_ng flutter configRhythmPlayer config: ${config.toJson().toString()}");
    final Map<String, dynamic> args = {
      "config": jsonEncode(config.toJson()),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_configRhythmPlayer", args);
  }

  /// Creates a custom encoded-video track and returns its id (-1 on failure).
  @override
  Future<int> createCustomEncodedVideoTrack(SenderOptions senderOption) async {
    print(
        "agora_rtc_ng flutter createCustomEncodedVideoTrack senderOption: ${senderOption.toJson().toString()}");
    final int? trackId = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_createCustomEncodedVideoTrack",
        <String, dynamic>{"senderOption": jsonEncode(senderOption.toJson())});
    return trackId ?? -1;
  }

  /// Creates a custom video track and returns its id (-1 on failure).
  @override
  Future<int> createCustomVideoTrack() async {
    final int? trackId = await engineMethodChannel
        .invokeMethod<int>("RtcEngine_createCustomVideoTrack");
    return trackId ?? -1;
  }

  /// Creates a data stream and returns its id (-1 on failure).
  @override
  Future<int> createDataStream(DataStreamConfig config) async {
    print(
        "agora_rtc_ng flutter createDataStream config: ${config.toJson().toString()}");
    final int? streamId = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_createDataStream",
        <String, dynamic>{"config": jsonEncode(config.toJson())});
    return streamId ?? -1;
  }

  /// Creates a native media player and wraps its id in a Dart-side impl.
  @override
  Future<MediaPlayer?> createMediaPlayer() async {
    final int? playerId = await engineMethodChannel
        .invokeMethod<int>("RtcEngine_createMediaPlayer");
    return MediaPlayerOhosImpl.create(playerId ?? -1, engineMethodChannel);
  }

  /// Creates a media recorder for the stream described by [info].
  @override
  Future<MediaRecorder?> createMediaRecorder(RecorderStreamInfo info) async {
    final Map<String, dynamic> args = {
      "info": jsonEncode(info.toJson()),
    };
    // Fix: await so the native recorder exists before the wrapper is returned.
    await engineMethodChannel.invokeMethod("RtcEngine_createMediaRecorder", args);
    return MediaRecorderOhosImpl.create();
  }

  /// Destroys the custom encoded-video track identified by [videoTrackId].
  @override
  Future<void> destroyCustomEncodedVideoTrack(int videoTrackId) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_destroyCustomEncodedVideoTrack",
        {"videoTrackId": videoTrackId.toString()});
  }

  /// Destroys the custom video track identified by [videoTrackId].
  @override
  Future<void> destroyCustomVideoTrack(int videoTrackId) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_destroyCustomVideoTrack",
        {"videoTrackId": videoTrackId.toString()});
  }

  @override
  Future<void> destroyMediaPlayer(MediaPlayer mediaPlayer) async {
    // No-op: native destruction is not wired up yet — TODO confirm the correct
    // platform method (the commented call below targets video tracks, not players).
    // engineMethodChannel.invokeMethod("RtcEngine_destroyCustomVideoTrack",{"videoTrackId":videoTrackId.toString()});
  }

  /// Destroys the native media recorder.
  @override
  Future<void> destroyMediaRecorder(MediaRecorder mediaRecorder) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_destroyMediaRecorder");
  }

  /// Disables the audio module.
  @override
  Future<void> disableAudio() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_disableAudio", {});
  }

  /// Stops audio-spectrum monitoring.
  @override
  Future<void> disableAudioSpectrumMonitor() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel
        .invokeMethod("RtcEngine_disableAudioSpectrumMonitor", {});
  }

  /// Disables the video module.
  @override
  Future<void> disableVideo() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_disableVideo", {});
  }

  /// Enables the audio module.
  @override
  Future<void> enableAudio() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableAudio");
  }

  /// Starts audio-spectrum monitoring with the given report interval.
  @override
  Future<void> enableAudioSpectrumMonitor({int intervalInMS = 100}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableAudioSpectrumMonitor",
        {"intervalInMS": intervalInMS.toString()});
  }

  /// Enables periodic speaker-volume reports (onAudioVolumeIndication).
  @override
  Future<void> enableAudioVolumeIndication(
      {required int interval,
      required int smooth,
      required bool reportVad}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel
        .invokeMethod("RtcEngine_enableAudioVolumeIndication", {
      "interval": interval.toString(),
      "smooth": smooth.toString(),
      "reportVad": reportVad,
    });
  }

  /// Toggles the camera's center-stage feature.
  @override
  Future<void> enableCameraCenterStage(bool enabled) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableCameraCenterStage", {
      "enabled": enabled,
    });
  }

  /// Toggles content inspection with the given configuration.
  @override
  Future<void> enableContentInspect(
      {required bool enabled, required ContentInspectConfig config}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "config": jsonEncode(config.toJson()),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableContentInspect", args);
  }

  /// Toggles local playback of a custom audio track.
  @override
  Future<void> enableCustomAudioLocalPlayback(
      {required int trackId, required bool enabled}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel
        .invokeMethod("RtcEngine_enableCustomAudioLocalPlayback", {
      "trackId": trackId.toString(),
      "enabled": enabled,
    });
  }

  /// Toggles dual-stream mode; [streamConfig] customizes the low stream.
  @override
  Future<void> enableDualStreamMode(
      {required bool enabled, SimulcastStreamConfig? streamConfig}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      // Empty string signals "use native defaults" to the platform side.
      "streamConfig":
          streamConfig != null ? jsonEncode(streamConfig.toJson()) : "",
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableDualStreamMode", args);
  }

  /// Toggles built-in media-stream encryption.
  @override
  Future<void> enableEncryption(
      {required bool enabled, required EncryptionConfig config}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "config": jsonEncode(config.toJson()),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableEncryption", args);
  }

  /// Enables or disables the extension from [provider] named [extension].
  @override
  Future<void> enableExtension(
      {required String provider,
      required String extension,
      bool enable = true,
      MediaSourceType type = MediaSourceType.unknownMediaSource}) async {
    final Map<String, dynamic> args = {
      "provider": provider,
      "extension": extension,
      // Fix: the `enable` flag was never forwarded, so the extension could
      // never be turned off through this API.
      "enable": enable,
      "type": type.value().toString(),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableExtension", args);
  }

  /// Toggles local face detection.
  @override
  Future<void> enableFaceDetection(bool enabled) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableFaceDetection", args);
  }

  /// Toggles in-ear monitoring with the given audio-filter selection.
  @override
  Future<void> enableInEarMonitoring(
      {required bool enabled,
      required EarMonitoringFilterType includeAudioFilters}) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableInEarMonitoring", {
      "enabled": enabled,
      "includeAudioFilters": includeAudioFilters.value().toString()
    });
  }

  /// Enables accelerated first-frame rendering.
  @override
  Future<void> enableInstantMediaRendering() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableInstantMediaRendering", <String, dynamic>{});
  }

  /// Toggles local audio capture.
  @override
  Future<void> enableLocalAudio(bool enabled) async {
    print("agora_rtc_ng enableLocalAudio enabled:$enabled");
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel
        .invokeMethod("RtcEngine_enableLocalAudio", {"enabled": enabled});
  }

  /// Toggles local video capture.
  @override
  Future<void> enableLocalVideo(bool enabled) async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel
        .invokeMethod("RtcEngine_enableLocalVideo", {"enabled": enabled});
  }

  /// Toggles loopback recording, optionally from a named device.
  @override
  Future<void> enableLoopbackRecording(
      {required bool enabled, String? deviceName}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "deviceName": deviceName ?? "",
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableLoopbackRecording", args);
  }

  /// Toggles capture from a secondary camera.
  @override
  Future<void> enableMultiCamera(
      {required bool enabled,
      required CameraCapturerConfiguration config}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "config": jsonEncode(config.toJson()),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableMultiCamera", args);
  }

  /// Toggles stereo panning (sound position) for remote users.
  @override
  Future<void> enableSoundPositionIndication(bool enabled) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableSoundPositionIndication", args);
  }

  /// Toggles spatial audio.
  @override
  Future<void> enableSpatialAudio(bool enabled) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableSpatialAudio", args);
  }

  /// Enables the video module.
  @override
  Future<void> enableVideo() async {
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableVideo");
  }

  /// Toggles replacing the video feed with a still image source.
  @override
  Future<void> enableVideoImageSource(
      {required bool enable, required ImageTrackOptions options}) async {
    final Map<String, dynamic> args = {
      "enable": enable,
      "options": jsonEncode(options.toJson()),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableVideoImageSource", args);
  }

  /// Toggles the virtual background with the given source and segmentation.
  @override
  Future<void> enableVirtualBackground(
      {required bool enabled,
      required VirtualBackgroundSource backgroundSource,
      required SegmentationProperty segproperty,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "backgroundSource": jsonEncode(backgroundSource.toJson()),
      "segproperty": jsonEncode(segproperty.toJson()),
      "type": type.value().toString(),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableVirtualBackground", args);
  }

  /// Toggles the AI voice tuner of the given type.
  @override
  Future<void> enableVoiceAITuner(
      {required bool enabled, required VoiceAiTunerType type}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "type": type.value().toString(),
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_enableVoiceAITuner", args);
  }

  @override
  // No-op on this platform: web interoperability is not forwarded natively.
  Future<void> enableWebSdkInteroperability(bool enabled) async {}

  /// Toggles wireless acceleration.
  @override
  Future<void> enableWirelessAccelerate(bool enabled) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
    };
    // Fix: await so platform errors are not silently dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableWirelessAccelerate", args);
  }

  /// Not supported on the OHOS backend yet.
  @override
  Future<DeviceInfo> getAudioDeviceInfo() => throw UnimplementedError();

  /// Not supported on the OHOS backend yet.
  @override
  AudioDeviceManager getAudioDeviceManager() => throw UnimplementedError();

  /// Returns the current audio-mixing position, or -1 if unavailable.
  @override
  Future<int> getAudioMixingCurrentPosition() async {
    final int? position = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getAudioMixingCurrentPosition", <String, dynamic>{});
    return position ?? -1;
  }

  /// Returns the audio-mixing duration, or -1 if unavailable.
  @override
  Future<int> getAudioMixingDuration() async {
    final int? duration = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getAudioMixingDuration", <String, dynamic>{});
    return duration ?? -1;
  }

  /// Returns the audio-mixing playout volume, or -1 if unavailable.
  @override
  Future<int> getAudioMixingPlayoutVolume() async {
    final int? volume = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getAudioMixingPlayoutVolume", <String, dynamic>{});
    return volume ?? -1;
  }

  /// Returns the audio-mixing publish volume, or -1 if unavailable.
  @override
  Future<int> getAudioMixingPublishVolume() async {
    final int? volume = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getAudioMixingPublishVolume", <String, dynamic>{});
    return volume ?? -1;
  }

  /// Returns the number of audio tracks, or -1 if unavailable.
  @override
  Future<int> getAudioTrackCount() async {
    final int? count = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getAudioTrackCount", <String, dynamic>{});
    return count ?? -1;
  }

  /// Returns the current call id, or an empty string if unavailable.
  @override
  Future<String> getCallId() async {
    final String? callId = await engineMethodChannel
        .invokeMethod<String>("RtcEngine_getCallId", <String, dynamic>{});
    return callId ?? "";
  }

  /// Returns the camera's maximum zoom factor, or 0 if unavailable.
  @override
  Future<double> getCameraMaxZoomFactor() async {
    final double? factor = await engineMethodChannel
        .invokeMethod<double>("RtcEngine_getCameraMaxZoomFactor");
    return factor ?? 0;
  }

  /// Stub: always reports "connected"; the native state is not queried.
  @override
  Future<ConnectionStateType> getConnectionState() async =>
      ConnectionStateType.connectionStateConnected;

  /// Returns the native monotonic clock in ms, or 0 if unavailable.
  @override
  Future<int> getCurrentMonotonicTimeInMs() async {
    final int? timeMs = await engineMethodChannel
        .invokeMethod<int>("RtcEngine_getCurrentMonotonicTimeInMs");
    return timeMs ?? 0;
  }

  /// Returns the playback position of effect [soundId], or -1 if unavailable.
  @override
  Future<int> getEffectCurrentPosition(int soundId) async {
    final int? position = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getEffectCurrentPosition",
        <String, dynamic>{"soundId": soundId.toString()});
    return position ?? -1;
  }

  /// Stub: always returns 0; the native side is not queried.
  @override
  Future<int> getEffectDuration(String filePath) async => 0;

  /// Returns the global effects volume, or -1 if unavailable.
  @override
  Future<int> getEffectsVolume() async {
    final int? volume = await engineMethodChannel
        .invokeMethod<int>("RtcEngine_getEffectsVolume");
    return volume ?? -1;
  }

  /// Stub: always returns the empty string; error lookup is not wired up.
  @override
  Future<String> getErrorDescription(int code) async => "";

  /// Stub: always returns the empty string; extension properties are not
  /// queried on this platform.
  @override
  Future<String> getExtensionProperty(
      {required String provider,
      required String extension,
      required String key,
      required int bufLen,
      MediaSourceType type = MediaSourceType.unknownMediaSource}) async {
    return "";
  }

  /// Stub: returns fixed cheek/zero-intensity options; not queried natively.
  @override
  Future<FaceShapeAreaOptions> getFaceShapeAreaOptions(
      {required FaceShapeArea shapeArea,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {
    return const FaceShapeAreaOptions(
        shapeArea: FaceShapeArea.faceShapeAreaCheek, shapeIntensity: 0);
  }

  /// Stub: returns fixed female-style/zero-intensity options; not queried
  /// natively.
  @override
  Future<FaceShapeBeautyOptions> getFaceShapeBeautyOptions(
      {MediaSourceType type = MediaSourceType.primaryCameraSource}) async {
    return const FaceShapeBeautyOptions(
        shapeStyle: FaceShapeBeautyStyle.faceShapeBeautyStyleFemale,
        styleIntensity: 0);
  }

  /// Not supported on the OHOS backend yet.
  @override
  H265Transcoder getH265Transcoder() => throw UnimplementedError();

  /// Not supported on the OHOS backend yet.
  @override
  LocalSpatialAudioEngine getLocalSpatialAudioEngine() =>
      throw UnimplementedError();

  /// Stub: always returns 0; the native side is not queried.
  @override
  Future<int> getLoopbackRecordingVolume() async => 0;

  /// Not supported on the OHOS backend yet.
  @override
  MediaEngine getMediaEngine() => throw UnimplementedError();

  /// Not supported on the OHOS backend yet.
  @override
  MusicContentCenter getMusicContentCenter() => throw UnimplementedError();

  /// Returns the native engine handle, parsed from its string form
  /// ("0" when the platform returns null).
  @override
  Future<int> getNativeHandle() async {
    final String? raw = await engineMethodChannel
        .invokeMethod<String>("RtcEngine_getNativeHandle");
    return int.parse(raw ?? "0");
  }

  /// Stub: always returns 0; the network type is not queried natively.
  @override
  Future<int> getNetworkType() async => 0;

  /// Stub: always returns 0; NTP wall time is not queried natively.
  @override
  Future<int> getNtpWallTimeInMs() async => 0;

  /// Not supported on the OHOS backend yet.
  @override
  Future<List<ScreenCaptureSourceInfo>> getScreenCaptureSources(
          {required SIZE thumbSize,
          required SIZE iconSize,
          required bool includeScreen}) =>
      throw UnimplementedError();

  /// Stub: returns a placeholder user; native lookup is not wired up yet.
  @override
  Future<UserInfo> getUserInfoByUid(int uid) async =>
      const UserInfo(uid: 0, userAccount: "1");

  /// Looks up a user's info by account name; only valid after joining a
  /// channel, otherwise throws [UnimplementedError].
  @override
  Future<UserInfo> getUserInfoByUserAccount(String userAccount) async {
    if (isJoinChannel) {
      final String? value = await engineMethodChannel.invokeMethod<String>(
          "RtcEngine_getUserInfoByUserAccount", {"userAccount": userAccount});
      // Fix: jsonDecode("") throws a FormatException; fail with a clear error
      // instead when the platform returns nothing.
      if (value == null || value.isEmpty) {
        throw StateError('no user info returned for "$userAccount"');
      }
      // Fix: jsonDecode yields a Map, not a UserInfo (the old code returned
      // the raw map and failed the Future<UserInfo> type at runtime).
      final Map<String, dynamic> map = jsonDecode(value);
      // NOTE(review): key names assumed to match UserInfo.toJson — confirm
      // against the native payload.
      return UserInfo(
          uid: int.tryParse('${map['uid'] ?? 0}') ?? 0,
          userAccount: '${map['userAccount'] ?? userAccount}');
    }
    throw UnimplementedError();
  }

  /// Stub: returns a hard-coded SDK version; the native side is not queried.
  @override
  Future<SDKBuildInfo> getVersion() async =>
      const SDKBuildInfo(build: 100, version: "1.0.0");

  /// Not supported on the OHOS backend yet.
  @override
  VideoDeviceManager getVideoDeviceManager() => throw UnimplementedError();

  /// Returns the volume of effect [soundId], or -1 if unavailable.
  @override
  Future<int> getVolumeOfEffect(int soundId) async {
    final int? volume = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_getVolumeOfEffect", {"soundId": soundId.toString()});
    return volume ?? -1;
  }

  /// Initializes the native engine.
  ///
  /// NOTE(review): only `appId` is forwarded — the other [RtcEngineContext]
  /// fields are dropped here; confirm the native side applies its own defaults.
  @override
  Future<void> initialize(RtcEngineContext context) async {
    // Fix: await so initialization errors surface to the caller.
    await engineMethodChannel
        .invokeMethod("RtcEngine_initialize", {"appId": context.appId});
  }

  /// Stub: always reports true; the native side is not queried.
  @override
  Future<bool> isCameraAutoExposureFaceModeSupported() async => true;

  /// Whether the camera supports face-tracked auto-focus (false if unknown).
  @override
  Future<bool> isCameraAutoFocusFaceModeSupported() async {
    final bool? supported = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isCameraAutoFocusFaceModeSupported");
    return supported ?? false;
  }

  /// Stub: always reports true; the native side is not queried.
  @override
  Future<bool> isCameraCenterStageSupported() async => true;

  /// Whether the camera supports exposure-position setting (false if unknown).
  @override
  Future<bool> isCameraExposurePositionSupported() async {
    final bool? supported = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isCameraExposurePositionSupported");
    return supported ?? false;
  }

  /// Whether the camera supports exposure adjustment (false if unknown).
  @override
  Future<bool> isCameraExposureSupported() async {
    final bool? supported = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isCameraExposureSupported");
    return supported ?? false;
  }

  /// Whether the camera supports face detection (false if unknown).
  @override
  Future<bool> isCameraFaceDetectSupported() async {
    final bool? supported = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isCameraFaceDetectSupported");
    return supported ?? false;
  }

  /// Whether the camera supports manual focus (false if unknown).
  @override
  Future<bool> isCameraFocusSupported() async {
    final bool? supported = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isCameraFocusSupported");
    return supported ?? false;
  }

  /// Stub: always reports false; the native side is not queried.
  @override
  Future<bool> isCameraTorchSupported() async => false;

  /// Whether the camera supports zoom (false if unknown).
  @override
  Future<bool> isCameraZoomSupported() async {
    final bool? supported = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isCameraZoomSupported");
    return supported ?? false;
  }

  /// Stub: always reports false; the native side is not queried.
  @override
  Future<bool> isFeatureAvailableOnDevice(FeatureType type) async => false;

  /// Whether the speakerphone is currently enabled (false if unknown).
  @override
  Future<bool> isSpeakerphoneEnabled() async {
    final bool? enabled = await engineMethodChannel
        .invokeMethod<bool>("RtcEngine_isSpeakerphoneEnabled");
    return enabled ?? false;
  }

  /// Joins [channelId] with [token] and publishes/subscribes per [options].
  ///
  /// NOTE(review): `isJoinChannel` is not set here — presumably flipped by the
  /// join-success event handler; confirm, since getUserInfoByUserAccount
  /// depends on it.
  @override
  Future<void> joinChannel(
      {required String token,
      required String channelId,
      required int uid,
      required ChannelMediaOptions options}) async {
    print("agora_rtc_ng flutter joinChannel ${jsonEncode(options.toJson())}");
    final Map<String, dynamic> args = {
      "token": token,
      "channelId": channelId,
      "uid": uid.toString(),
      "options": jsonEncode(options.toJson())
    };
    // Fix: await so join failures surface to the caller.
    await engineMethodChannel.invokeMethod("RtcEngine_joinChannel", args);
  }

  /// Joins [channelId] using a string user account instead of a numeric uid.
  @override
  Future<void> joinChannelWithUserAccount(
      {required String token,
      required String channelId,
      required String userAccount,
      ChannelMediaOptions? options}) async {
    final Map<String, dynamic> args = {
      "token": token,
      "channelId": channelId,
      "userAccount": userAccount,
    };
    // Options are optional; omit the key entirely when not supplied.
    if (options != null) args['options'] = jsonEncode(options.toJson());
    // Fix: await so join failures surface to the caller.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_joinChannelWithUserAccount", args);
  }

  /// Stub: no-op — the "Ex" variant is not forwarded to the platform side on
  /// this backend; callers fall back to [joinChannelWithUserAccount].
  /// TODO(review): confirm intentionally unsupported.
  @override
  Future<void> joinChannelWithUserAccountEx(
      {required String token,
      required String channelId,
      required String userAccount,
      required ChannelMediaOptions options}) async {}

  /// Leaves the current channel.
  ///
  /// NOTE(review): [options] is currently ignored and not forwarded to the
  /// platform side — confirm whether the native handler accepts leave options.
  @override
  Future<void> leaveChannel({LeaveChannelOptions? options}) async {
    // Await so leave failures propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_leaveChannel");
  }

  /// Stub: no-op — extension-provider loading makes no platform-channel call
  /// on this backend. TODO(review): confirm unsupported.
  @override
  Future<void> loadExtensionProvider(
      {required String path, bool unloadAfterUse = false}) async {}

  /// Mutes or unmutes all remote users' audio streams.
  @override
  Future<void> muteAllRemoteAudioStreams(bool mute) async {
    final Map<String, dynamic> args = {
      "mute": mute,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_muteAllRemoteAudioStreams", args);
  }

  /// Mutes or unmutes all remote users' video streams.
  @override
  Future<void> muteAllRemoteVideoStreams(bool mute) async {
    final Map<String, dynamic> args = {
      "mute": mute,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_muteAllRemoteVideoStreams", args);
  }

  /// Stops or resumes publishing the local audio stream.
  @override
  Future<void> muteLocalAudioStream(bool mute) async {
    final Map<String, dynamic> args = {
      "mute": mute,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteLocalAudioStream", args);
  }

  /// Stops or resumes publishing the local video stream.
  @override
  Future<void> muteLocalVideoStream(bool mute) async {
    final Map<String, dynamic> args = {
      "mute": mute,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_muteLocalVideoStream", args);
  }

  /// Mutes or unmutes the recording (microphone) signal.
  @override
  Future<void> muteRecordingSignal(bool mute) async {
    final Map<String, dynamic> args = {
      "mute": mute,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_muteRecordingSignal", args);
  }

  /// Mutes or unmutes the audio stream of the remote user [uid].
  @override
  Future<void> muteRemoteAudioStream(
      {required int uid, required bool mute}) async {
    final Map<String, dynamic> args = {
      "mute": mute,
      "uid": uid.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_muteRemoteAudioStream", args);
  }

  /// Mutes or unmutes the video stream of the remote user [uid].
  @override
  Future<void> muteRemoteVideoStream(
      {required int uid, required bool mute}) async {
    final Map<String, dynamic> args = {
      "mute": mute,
      "uid": uid.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_muteRemoteVideoStream", args);
  }

  /// Pauses relaying media streams to all destination channels.
  @override
  Future<void> pauseAllChannelMediaRelay() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel
        .invokeMethod<int>("RtcEngine_pauseAllChannelMediaRelay");
  }

  /// Pauses playback of all audio effects.
  @override
  Future<void> pauseAllEffects() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_pauseAllEffects");
  }

  /// Stub: no-op — unlike [resumeAudio], no platform-channel call is made.
  /// TODO(review): confirm this asymmetry is intentional.
  @override
  Future<void> pauseAudio() async {}

  /// Pauses audio mixing playback.
  @override
  Future<void> pauseAudioMixing() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_pauseAudioMixing");
  }

  /// Pauses playback of the audio effect identified by [soundId].
  @override
  Future<void> pauseEffect(int soundId) async {
    final Map<String, dynamic> args = {
      "soundId": soundId.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_pauseEffect", args);
  }

  /// Plays all preloaded audio effects with the given loop count, pitch, pan
  /// and gain; [publish] controls whether remote users hear the effects.
  /// Numeric parameters are serialized as strings for the platform side.
  @override
  Future<void> playAllEffects(
      {required int loopCount,
      required double pitch,
      required double pan,
      required int gain,
      bool publish = false}) async {
    final Map<String, dynamic> args = {
      "loopCount": loopCount.toString(),
      "pitch": pitch.toString(),
      "pan": pan.toString(),
      "gain": gain.toString(),
      "publish": publish,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_playAllEffects", args);
  }

  /// Plays the audio effect [soundId] from [filePath] with the given loop
  /// count, pitch, pan, gain and start position; [publish] controls whether
  /// remote users hear it. Numeric parameters are serialized as strings.
  @override
  Future<void> playEffect(
      {required int soundId,
      required String filePath,
      required int loopCount,
      required double pitch,
      required double pan,
      required int gain,
      bool publish = false,
      int startPos = 0}) async {
    final Map<String, dynamic> args = {
      "soundId": soundId.toString(),
      "filePath": filePath,
      "loopCount": loopCount.toString(),
      "pitch": pitch.toString(),
      "pan": pan.toString(),
      "gain": gain.toString(),
      "publish": publish,
      "startPos": startPos.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_playEffect", args);
  }

  /// Stub: no-op — unlike [preloadChannelWithUserAccount], no platform call
  /// is made for uid-based preloading. TODO(review): confirm unsupported.
  @override
  Future<void> preloadChannel(
      {required String token,
      required String channelId,
      required int uid}) async {}

  /// Preloads [channelId] for the given [userAccount] to speed up a later join.
  @override
  Future<void> preloadChannelWithUserAccount(
      {required String token,
      required String channelId,
      required String userAccount}) async {
    final Map<String, dynamic> args = {
      "token": token,
      "channelId": channelId,
      "userAccount": userAccount,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_preloadChannelWithUserAccount", args);
  }

  /// Preloads the audio effect [soundId] from [filePath] starting at
  /// [startPos] milliseconds.
  @override
  Future<void> preloadEffect(
      {required int soundId,
      required String filePath,
      int startPos = 0}) async {
    final Map<String, dynamic> args = {
      "soundId": soundId.toString(),
      "filePath": filePath,
      "startPos": startPos.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_preloadEffect", args);
  }

  /// Stub: returns a single default [FocalLengthInfo] without querying the
  /// platform side. TODO(review): confirm unsupported on this backend.
  @override
  Future<List<FocalLengthInfo>> queryCameraFocalLengthCapability() async {
    return [const FocalLengthInfo()];
  }

  /// Stub: returns a single default [CodecCapInfo]; [size] is ignored and no
  /// platform query is made. TODO(review): confirm unsupported.
  @override
  Future<List<CodecCapInfo>> queryCodecCapability(int size) async {
    return [const CodecCapInfo()];
  }

  /// Stub: always reports a device score of 0 without a platform query.
  /// TODO(review): confirm unsupported on this backend.
  @override
  Future<int> queryDeviceScore() {
    return Future.value(0);
  }

  /// Stub: always reports HDR as supported for any [videoModule], without a
  /// platform query. TODO(review): confirm this optimistic default is safe.
  @override
  Future<HdrCapability> queryHDRCapability(VideoModuleType videoModule) async {
    return HdrCapability.hdrCapabilitySupported;
  }

  /// Stub: always reports capability level 0 without a platform query.
  /// TODO(review): confirm unsupported on this backend.
  @override
  Future<int> queryScreenCaptureCapability() {
    return Future.value(0);
  }

  /// Stub: no-op — call-quality rating is not forwarded to the platform side.
  @override
  Future<void> rate(
      {required String callId,
      required int rating,
      required String description}) async {}

  /// Stub: no-op — the observer is not registered anywhere; encoded-audio
  /// callbacks will never fire on this backend. TODO(review): confirm.
  @override
  void registerAudioEncodedFrameObserver(
      {required AudioEncodedFrameObserverConfig config,
      required AudioEncodedFrameObserver observer}) {}

  /// Stub: no-op — the observer is not registered; spectrum callbacks will
  /// never fire on this backend. TODO(review): confirm.
  @override
  void registerAudioSpectrumObserver(AudioSpectrumObserver observer) {}

  /// Adds [eventHandler] to the internal handler list so it receives engine
  /// events. Note: duplicates are not checked — registering the same handler
  /// twice presumably delivers events twice (TODO confirm dispatch logic).
  @override
  void registerEventHandler(RtcEngineEventHandler eventHandler) {
    print("agora_rtc_ng registerEventHandler---------- ${eventHandler.hashCode}");
    _eventHandler.add(eventHandler);
  }

  /// Stub: no-op — extension registration is not forwarded to the platform
  /// side on this backend. TODO(review): confirm unsupported.
  @override
  Future<void> registerExtension(
      {required String provider,
      required String extension,
      MediaSourceType type = MediaSourceType.unknownMediaSource}) async {}

  /// Registers [userAccount] for [appId] ahead of joining, so the first join
  /// with that account is faster.
  @override
  Future<void> registerLocalUserAccount(
      {required String appId, required String userAccount}) async {
    final Map<String, dynamic> args = {
      "appId": appId,
      "userAccount": userAccount,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_registerLocalUserAccount", args);
  }

  /// Stub: no-op — the observer is never registered and the platform call is
  /// commented out below, so metadata callbacks will not fire on this backend.
  /// TODO(review): either implement or remove the dead commented code.
  @override
  void registerMediaMetadataObserver(
      {required MetadataObserver observer, required MetadataType type}) {
    // final Map<String, dynamic> args = {
    //   "appId":appId,
    //   "userAccount":userAccount,
    // };
    // engineMethodChannel.invokeMethod<int>("RtcEngine_registerMediaMetadataObserver",args);
  }

  /// Releases the underlying engine resources on the platform side.
  ///
  /// NOTE(review): [sync] is ignored and not forwarded — confirm whether the
  /// native release supports synchronous teardown.
  @override
  Future<void> release({bool sync = false}) async {
    // Await so callers can sequence teardown after the native release
    // actually completes (previously the future was dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_release");
  }

  /// Renews the channel [token] before it expires.
  @override
  Future<void> renewToken(String token) async {
    final Map<String, dynamic> args = {
      "token": token,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_renewToken", args);
  }

  /// Resumes relaying media streams to all destination channels.
  @override
  Future<void> resumeAllChannelMediaRelay() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel
        .invokeMethod<int>("RtcEngine_resumeAllChannelMediaRelay");
  }

  /// Resumes playback of all paused audio effects.
  @override
  Future<void> resumeAllEffects() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_resumeAllEffects");
  }

  /// Resumes audio playback (counterpart of [pauseAudio]).
  @override
  Future<void> resumeAudio() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_resumeAudio");
  }

  /// Resumes paused audio mixing playback.
  @override
  Future<void> resumeAudioMixing() async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_resumeAudioMixing");
  }

  /// Resumes playback of the paused audio effect [soundId].
  @override
  Future<void> resumeEffect(int soundId) async {
    final Map<String, dynamic> args = {
      "soundId": soundId.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>("RtcEngine_resumeEffect", args);
  }

  /// Selects the audio track [index] of the current media file for playback.
  @override
  Future<void> selectAudioTrack(int index) async {
    final Map<String, dynamic> args = {
      "index": index.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_selectAudioTrack", args);
  }

  /// Sends [metadata] (base64-encoded for the channel codec) along with the
  /// audio stream; [length] is the payload size in bytes.
  @override
  Future<void> sendAudioMetadata(
      {required Uint8List metadata, required int length}) async {
    final Map<String, dynamic> args = {
      "metadata": base64Encode(metadata),
      "length": length.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_sendAudioMetadata", args);
  }

  /// Stub: no-op — custom report messages are not forwarded to the platform
  /// side on this backend.
  @override
  Future<void> sendCustomReportMessage(
      {required String id,
      required String category,
      required String event,
      required String label,
      required int value}) async {}

  /// Stub: no-op — video-stream metadata sending is not forwarded to the
  /// platform side on this backend.
  @override
  Future<void> sendMetaData(
      {required Metadata metadata,
      required VideoSourceType sourceType}) async {}

  /// Sends [data] ([length] bytes) over the data stream [streamId].
  /// The raw bytes are passed through the standard message codec, which
  /// supports [Uint8List] natively.
  @override
  Future<void> sendStreamMessage(
      {required int streamId,
      required Uint8List data,
      required int length}) async {
    final Map<String, dynamic> args = {
      "streamId": streamId.toString(),
      "data": data,
      "length": length.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_sendStreamMessage", args);
  }

  /// Stub: no-op — AI noise suppression is not forwarded to the platform side.
  @override
  Future<void> setAINSMode(
      {required bool enabled, required AudioAinsMode mode}) async {}

  /// Stub: no-op — A/V sync source selection is not forwarded to the platform
  /// side.
  @override
  Future<void> setAVSyncSource(
      {required String channelId, required int uid}) async {}

  /// Stub: no-op — advanced audio options are not forwarded to the platform
  /// side.
  @override
  Future<void> setAdvancedAudioOptions(
      {required AdvancedAudioOptions options, int sourceType = 0}) async {}

  /// Stub: no-op — audio effect preset parameters are not forwarded to the
  /// platform side.
  @override
  Future<void> setAudioEffectParameters(
      {required AudioEffectPreset preset,
      required int param1,
      required int param2}) async {}

  /// Stub: no-op — audio effect presets are not forwarded to the platform side.
  @override
  Future<void> setAudioEffectPreset(AudioEffectPreset preset) async {}

  /// Stub: no-op — dual-mono mixing mode is not forwarded to the platform side.
  @override
  Future<void> setAudioMixingDualMonoMode(AudioMixingDualMonoMode mode) async {}

  /// Stub: no-op — mixing pitch is not forwarded to the platform side.
  @override
  Future<void> setAudioMixingPitch(int pitch) async {}

  /// Sets the audio mixing playback [speed].
  @override
  Future<void> setAudioMixingPlaybackSpeed(int speed) async {
    final Map<String, dynamic> args = {
      "speed": speed.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setAudioMixingPlaybackSpeed", args);
  }

  /// Seeks audio mixing playback to position [pos].
  @override
  Future<void> setAudioMixingPosition(int pos) async {
    final Map<String, dynamic> args = {
      "pos": pos.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setAudioMixingPosition", args);
  }

  /// Sets the audio [profile].
  ///
  /// NOTE(review): [scenario] is accepted but NOT forwarded to the platform
  /// side — only the profile is sent. Confirm whether the native handler
  /// accepts a "scenario" argument; callers needing it must use
  /// [setAudioScenario] separately.
  @override
  Future<void> setAudioProfile(
      {required AudioProfileType profile,
      AudioScenarioType scenario =
          AudioScenarioType.audioScenarioDefault}) async {
    final Map<String, dynamic> args = {
      "profile": profile.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setAudioProfile", args);
  }

  /// Sets the audio [scenario].
  @override
  Future<void> setAudioScenario(AudioScenarioType scenario) async {
    final Map<String, dynamic> args = {
      "scenario": scenario.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setAudioScenario", args);
  }

  /// Stub: no-op — audio session restrictions (an iOS concept) are not
  /// applicable/forwarded on this backend.
  @override
  Future<void> setAudioSessionOperationRestriction(
      AudioSessionOperationRestriction restriction) async {}

  /// Enables/disables the beauty filter with [options] for media source [type].
  @override
  Future<void> setBeautyEffectOptions(
      {required bool enabled,
      required BeautyOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "options": jsonEncode(options.toJson()),
      "type": type.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setBeautyEffectOptions", args);
  }

  /// Stub: no-op — auto-exposure face mode is not forwarded to the platform
  /// side (unlike [setCameraAutoFocusFaceModeEnabled]).
  @override
  Future<void> setCameraAutoExposureFaceModeEnabled(bool enabled) async {}

  /// Enables/disables face-tracking autofocus on the camera.
  @override
  Future<void> setCameraAutoFocusFaceModeEnabled(bool enabled) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setCameraAutoFocusFaceModeEnabled", args);
  }

  /// Applies the camera capturer [config] (serialized as JSON).
  @override
  Future<void> setCameraCapturerConfiguration(
      CameraCapturerConfiguration config) async {
    final Map<String, dynamic> args = {
      "config": jsonEncode(config.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setCameraCapturerConfiguration", args);
  }

  /// Stub: no-op — camera orientation is not forwarded to the platform side.
  @override
  Future<void> setCameraDeviceOrientation(
      {required VideoSourceType type,
      required VideoOrientation orientation}) async {}

  /// Sets the camera exposure compensation [factor].
  @override
  Future<void> setCameraExposureFactor(double factor) async {
    final Map<String, dynamic> args = {
      "factor": factor.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setCameraExposureFactor", args);
  }

  /// Sets the camera metering point to the given in-view coordinates.
  @override
  Future<void> setCameraExposurePosition(
      {required double positionXinView,
      required double positionYinView}) async {
    final Map<String, dynamic> args = {
      "positionXinView": positionXinView.toString(),
      "positionYinView": positionYinView.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setCameraExposurePosition", args);
  }

  /// Sets the camera focus point to the given preview coordinates.
  @override
  Future<void> setCameraFocusPositionInPreview(
      {required double positionX, required double positionY}) async {
    final Map<String, dynamic> args = {
      "positionX": positionX.toString(),
      "positionY": positionY.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setCameraFocusPositionInPreview", args);
  }

  /// Stub: no-op — camera stabilization is not forwarded to the platform side.
  @override
  Future<void> setCameraStabilizationMode(CameraStabilizationMode mode) async {}

  /// Stub: no-op — consistent with [isCameraTorchSupported] returning false,
  /// torch control makes no platform call on this backend.
  @override
  Future<void> setCameraTorchOn(bool isOn) async {}

  /// Sets the camera zoom [factor].
  @override
  Future<void> setCameraZoomFactor(double factor) async {
    final Map<String, dynamic> args = {
      "factor": factor.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setCameraZoomFactor", args);
  }

  /// Sets the channel [profile] (e.g. communication vs. live broadcasting).
  @override
  Future<void> setChannelProfile(ChannelProfileType profile) async {
    final Map<String, dynamic> args = {
      "profile": profile.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_setChannelProfile", args);
  }

  /// Sets the user's [role] (broadcaster/audience) with optional [options];
  /// an empty string is sent when no options are provided.
  @override
  Future<void> setClientRole(
      {required ClientRoleType role, ClientRoleOptions? options}) async {
    print("agora_rtc_ng flutter setClientRole ${options?.toJson().toString()}");
    final Map<String, dynamic> args = {
      "role": role.value().toString(),
      "options": options != null ? jsonEncode(options.toJson()) : "",
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setClientRole", args);
  }

  /// Stub: no-op — cloud proxy configuration is not forwarded to the platform
  /// side.
  @override
  Future<void> setCloudProxy(CloudProxyType proxyType) async {}

  /// Stub: no-op — color enhancement is not forwarded to the platform side.
  @override
  Future<void> setColorEnhanceOptions(
      {required bool enabled,
      required ColorEnhanceOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {}

  /// Sets whether the default audio route is the speakerphone.
  @override
  Future<void> setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker) async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setDefaultAudioRouteToSpeakerphone",
        {"defaultToSpeaker": defaultToSpeaker});
  }

  /// Stub: no-op — direct CDN streaming audio config is not forwarded to the
  /// platform side.
  @override
  Future<void> setDirectCdnStreamingAudioConfiguration(
      AudioProfileType profile) async {}

  /// Unimplemented: throws [UnimplementedError]. Note this is inconsistent
  /// with the other unimplemented methods in this class, which silently no-op;
  /// callers of this one will crash. TODO(review): pick one convention.
  @override
  Future<void> setDirectCdnStreamingVideoConfiguration(
      VideoEncoderConfiguration config) {
    // TODO: implement setDirectCdnStreamingVideoConfiguration
    throw UnimplementedError();
  }

  /// Sets the simulcast (dual-stream) [mode]; [streamConfig] is sent as JSON
  /// or an empty string when absent.
  @override
  Future<void> setDualStreamMode(
      {required SimulcastStreamMode mode,
      SimulcastStreamConfig? streamConfig}) async {
    final Map<String, dynamic> args = {
      "mode": mode.value().toString(),
      "streamConfig":
          streamConfig != null ? jsonEncode(streamConfig.toJson()) : "",
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setDualStreamMode", args);
  }

  /// Stub: no-op — ear-monitoring frame parameters are not forwarded to the
  /// platform side.
  @override
  Future<void> setEarMonitoringAudioFrameParameters(
      {required int sampleRate,
      required int channel,
      required RawAudioFrameOpModeType mode,
      required int samplesPerCall}) async {}

  /// Stub: no-op — effect seek position is not forwarded to the platform side.
  @override
  Future<void> setEffectPosition(
      {required int soundId, required int pos}) async {}

  /// Sets the playback [volume] of all audio effects.
  @override
  Future<void> setEffectsVolume(int volume) async {
    final Map<String, dynamic> args = {
      "volume": volume.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setEffectsVolume", args);
  }

  /// Routes audio to the speakerphone ([speakerOn] true) or earpiece (false).
  @override
  Future<void> setEnableSpeakerphone(bool speakerOn) async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setEnableSpeakerphone", {"speakerOn": speakerOn});
  }

  /// Stub: no-op — extension properties are not forwarded to the platform side.
  @override
  Future<void> setExtensionProperty(
      {required String provider,
      required String extension,
      required String key,
      required String value,
      MediaSourceType type = MediaSourceType.unknownMediaSource}) async {}

  /// Stub: no-op — extension provider properties are not forwarded to the
  /// platform side.
  @override
  Future<void> setExtensionProviderProperty(
      {required String provider,
      required String key,
      required String value}) async {}

  /// Stub: no-op — external media projection (an Android concept) is not
  /// forwarded on this backend.
  @override
  Future<void> setExternalMediaProjection(int mediaProjection) async {}

  /// Stub: no-op — face shape area options are not forwarded to the platform
  /// side.
  @override
  Future<void> setFaceShapeAreaOptions(
      {required FaceShapeAreaOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {}

  /// Stub: no-op — face shape beauty options are not forwarded to the platform
  /// side.
  @override
  Future<void> setFaceShapeBeautyOptions(
      {required bool enabled,
      required FaceShapeBeautyOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {}

  /// Stub: no-op — filter effect options are not forwarded to the platform
  /// side.
  @override
  Future<void> setFilterEffectOptions(
      {required bool enabled,
      required FilterEffectOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {}

  /// Stub: no-op — headphone EQ parameters are not forwarded to the platform
  /// side.
  @override
  Future<void> setHeadphoneEQParameters(
      {required int lowGain, required int highGain}) async {}

  /// Stub: no-op — headphone EQ presets are not forwarded to the platform side.
  @override
  Future<void> setHeadphoneEQPreset(HeadphoneEqualizerPreset preset) async {}

  /// Stub: no-op — high-priority user lists are not forwarded to the platform
  /// side.
  @override
  Future<void> setHighPriorityUserList(
      {required List<int> uidList,
      required int uidNum,
      required StreamFallbackOptions option}) async {}

  /// Sets the in-ear monitoring [volume].
  @override
  Future<void> setInEarMonitoringVolume(int volume) async {
    // BUG FIX: the arguments were previously written as {"volume", ...},
    // which is a Set literal, not a Map — the platform side could never
    // decode the volume. Use a proper key/value map, matching every other
    // method in this class.
    final Map<String, dynamic> args = {
      "volume": volume.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setInEarMonitoringVolume", args);
  }

  /// Applies the local access point [config] (serialized as JSON).
  @override
  Future<void> setLocalAccessPoint(LocalAccessPointConfiguration config) async {
    final Map<String, dynamic> args = {
      "config": jsonEncode(config.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setLocalAccessPoint", args);
  }

  /// Stub: no-op — local publish fallback is not forwarded to the platform
  /// side.
  @override
  Future<void> setLocalPublishFallbackOption(
      StreamFallbackOptions option) async {}

  /// Sets the local video [renderMode] and [mirrorMode].
  @override
  Future<void> setLocalRenderMode(
      {required RenderModeType renderMode,
      VideoMirrorModeType mirrorMode =
          VideoMirrorModeType.videoMirrorModeAuto}) async {
    final Map<String, dynamic> args = {
      "renderMode": renderMode.value().toString(),
      "mirrorMode": mirrorMode.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setLocalRenderMode", args);
  }

  /// Stub: no-op — local render target FPS is not forwarded to the platform
  /// side.
  @override
  Future<void> setLocalRenderTargetFps(
      {required VideoSourceType sourceType, required int targetFps}) async {}

  /// Stub: no-op — use the mirrorMode parameter of [setLocalRenderMode]
  /// instead; this standalone setter makes no platform call.
  @override
  Future<void> setLocalVideoMirrorMode(VideoMirrorModeType mirrorMode) async {}

  /// Stub: no-op — local voice EQ is not forwarded to the platform side.
  @override
  Future<void> setLocalVoiceEqualization(
      {required AudioEqualizationBandFrequency bandFrequency,
      required int bandGain}) async {}

  /// Stub: no-op — voice formant is not forwarded to the platform side.
  @override
  Future<void> setLocalVoiceFormant(double formantRatio) async {}

  /// Stub: no-op — voice pitch is not forwarded to the platform side.
  @override
  Future<void> setLocalVoicePitch(double pitch) async {}

  /// Stub: no-op — voice reverb is not forwarded to the platform side.
  @override
  Future<void> setLocalVoiceReverb(
      {required AudioReverbType reverbKey, required int value}) async {}

  /// Sets the SDK log file to [filePath].
  @override
  Future<void> setLogFile(String filePath) async {
    final Map<String, dynamic> args = {
      "filePath": filePath,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setLogFile", args);
  }

  /// Sets the maximum SDK log file size to [fileSizeInKBytes].
  @override
  Future<void> setLogFileSize(int fileSizeInKBytes) async {
    final Map<String, dynamic> args = {
      "fileSizeInKBytes": fileSizeInKBytes.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setLogFileSize", args);
  }

  /// Sets the SDK log [filter] level.
  @override
  Future<void> setLogFilter(LogFilterType filter) async {
    final Map<String, dynamic> args = {
      "filter": filter.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setLogFilter", args);
  }

  /// Stub: no-op — log level is not forwarded (only [setLogFilter] is wired
  /// up on this backend). TODO(review): confirm intentional.
  @override
  Future<void> setLogLevel(LogLevel level) async {}

  /// Stub: no-op — low-light enhancement is not forwarded to the platform
  /// side.
  @override
  Future<void> setLowlightEnhanceOptions(
      {required bool enabled,
      required LowlightEnhanceOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {}

  /// Stub: no-op — max metadata size is not forwarded to the platform side.
  @override
  Future<void> setMaxMetadataSize(int size) async {}

  /// Configures the mixed (playback+recording) audio frame callback format.
  @override
  Future<void> setMixedAudioFrameParameters(
      {required int sampleRate,
      required int channel,
      required int samplesPerCall}) async {
    final Map<String, dynamic> args = {
      "sampleRate": sampleRate.toString(),
      "channel": channel.toString(),
      "samplesPerCall": samplesPerCall.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setMixedAudioFrameParameters", args);
  }

  /// Passes a raw [parameters] JSON string through to the engine.
  @override
  Future<void> setParameters(String parameters) async {
    final Map<String, dynamic> args = {
      "parameters": parameters,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setParameters", args);
  }

  /// Configures the per-user pre-mixing playback audio frame callback format.
  @override
  Future<void> setPlaybackAudioFrameBeforeMixingParameters(
      {required int sampleRate, required int channel}) async {
    final Map<String, dynamic> args = {
      "sampleRate": sampleRate.toString(),
      "channel": channel.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setPlaybackAudioFrameBeforeMixingParameters", args);
  }

  /// Configures the playback audio frame callback format.
  @override
  Future<void> setPlaybackAudioFrameParameters(
      {required int sampleRate,
      required int channel,
      required RawAudioFrameOpModeType mode,
      required int samplesPerCall}) async {
    final Map<String, dynamic> args = {
      "sampleRate": sampleRate.toString(),
      "channel": channel.toString(),
      "mode": mode.value().toString(),
      "samplesPerCall": samplesPerCall.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setPlaybackAudioFrameParameters", args);
  }

  /// Configures the recording audio frame callback format.
  @override
  Future<void> setRecordingAudioFrameParameters(
      {required int sampleRate,
      required int channel,
      required RawAudioFrameOpModeType mode,
      required int samplesPerCall}) async {
    final Map<String, dynamic> args = {
      "sampleRate": sampleRate.toString(),
      "channel": channel.toString(),
      "mode": mode.value().toString(),
      "samplesPerCall": samplesPerCall.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setRecordingAudioFrameParameters", args);
  }

  /// Sets the default subscribed video [streamType] for all remote users.
  @override
  Future<void> setRemoteDefaultVideoStreamType(
      VideoStreamType streamType) async {
    final Map<String, dynamic> args = {
      "streamType": streamType.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setRemoteDefaultVideoStreamType", args);
  }

  /// Sets the [renderMode] and [mirrorMode] for the remote user [uid]'s video.
  @override
  Future<void> setRemoteRenderMode(
      {required int uid,
      required RenderModeType renderMode,
      required VideoMirrorModeType mirrorMode}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "renderMode": renderMode.value().toString(),
      "mirrorMode": mirrorMode.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setRemoteRenderMode", args);
  }

  /// Stub: no-op — remote render target FPS is not forwarded to the platform
  /// side.
  @override
  Future<void> setRemoteRenderTargetFps(int targetFps) async {}

  /// Stub: no-op — remote subscribe fallback is not forwarded to the platform
  /// side.
  @override
  Future<void> setRemoteSubscribeFallbackOption(
      StreamFallbackOptions option) async {}

  /// Stub: no-op — remote user priority is not forwarded to the platform side.
  @override
  Future<void> setRemoteUserPriority(
      {required int uid, required PriorityType userPriority}) async {}

  /// Stub: no-op — spatial audio parameters are not forwarded to the platform
  /// side.
  @override
  Future<void> setRemoteUserSpatialAudioParams(
      {required int uid, required SpatialAudioParams params}) async {}

  /// Sets the subscribed video [streamType] for the remote user [uid].
  @override
  Future<void> setRemoteVideoStreamType(
      {required int uid, required VideoStreamType streamType}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "streamType": streamType.value().toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setRemoteVideoStreamType", args);
  }

  /// Stub: no-op — video subscription options are not forwarded to the
  /// platform side.
  @override
  Future<void> setRemoteVideoSubscriptionOptions(
      {required int uid, required VideoSubscriptionOptions options}) async {}

  /// Stub: no-op — remote voice positioning is not forwarded to the platform
  /// side.
  @override
  Future<void> setRemoteVoicePosition(
      {required int uid, required double pan, required double gain}) async {}

  /// Stub: no-op — communication-mode routing is not forwarded to the
  /// platform side.
  @override
  Future<void> setRouteInCommunicationMode(int route) async {}

  /// Stub: no-op — screen capture content hints are not forwarded to the
  /// platform side.
  @override
  Future<void> setScreenCaptureContentHint(
      VideoContentHint contentHint) async {}

  /// Stub: no-op — screen capture orientation is not forwarded to the
  /// platform side.
  @override
  Future<void> setScreenCaptureOrientation(
      {required VideoSourceType type,
      required VideoOrientation orientation}) async {}

  /// Stub: no-op — screen capture scenario is not forwarded to the platform
  /// side.
  @override
  Future<void> setScreenCaptureScenario(
      ScreenScenarioType screenScenario) async {}

  /// Stub: no-op — simulcast config is not forwarded; see [setDualStreamMode]
  /// for the supported simulcast control on this backend.
  @override
  Future<void> setSimulcastConfig(SimulcastConfig simulcastConfig) async {}

  /// Stub: no-op — audio allowlists are not forwarded to the platform side.
  @override
  Future<void> setSubscribeAudioAllowlist(
      {required List<int> uidList, required int uidNumber}) async {}

  /// Stub: no-op — audio blocklists are not forwarded to the platform side.
  @override
  Future<void> setSubscribeAudioBlocklist(
      {required List<int> uidList, required int uidNumber}) async {}

  /// Stub: no-op — video allowlists are not forwarded to the platform side.
  @override
  Future<void> setSubscribeVideoAllowlist(
      {required List<int> uidList, required int uidNumber}) async {}

  /// Stub: no-op — video blocklists are not forwarded to the platform side.
  @override
  Future<void> setSubscribeVideoBlocklist(
      {required List<int> uidList, required int uidNumber}) async {}

  /// Stub: no-op — video denoiser options are not forwarded to the platform
  /// side.
  @override
  Future<void> setVideoDenoiserOptions(
      {required bool enabled,
      required VideoDenoiserOptions options,
      MediaSourceType type = MediaSourceType.primaryCameraSource}) async {}

  /// Applies the video encoder [config] (serialized as JSON).
  @override
  Future<void> setVideoEncoderConfiguration(
      VideoEncoderConfiguration config) async {
    final Map<String, dynamic> args = {"config": jsonEncode(config.toJson())};
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setVideoEncoderConfiguration", args);
  }

  /// Stub: no-op — video QoE preference is not forwarded to the platform side.
  @override
  Future<void> setVideoQoEPreference(
      VideoQoePreferenceType qoePreference) async {}

  /// Stub: no-op — video application scenario is not forwarded to the
  /// platform side.
  @override
  Future<void> setVideoScenario(
      VideoApplicationScenarioType scenarioType) async {}

  /// Stub: no-op — voice beautifier parameters are not forwarded to the
  /// platform side.
  @override
  Future<void> setVoiceBeautifierParameters(
      {required VoiceBeautifierPreset preset,
      required int param1,
      required int param2}) async {}

  /// Stub: no-op — voice beautifier presets are not forwarded to the platform
  /// side.
  @override
  Future<void> setVoiceBeautifierPreset(VoiceBeautifierPreset preset) async {}

  /// Stub: no-op — voice conversion parameters are not forwarded to the
  /// platform side.
  @override
  Future<void> setVoiceConversionParameters(
      {required VoiceConversionPreset preset,
      required int param1,
      required int param2}) async {}

  /// Stub: no-op — voice conversion presets are not forwarded to the platform
  /// side.
  @override
  Future<void> setVoiceConversionPreset(VoiceConversionPreset preset) async {}

  /// Sets the playback [volume] of the audio effect [soundId].
  @override
  Future<void> setVolumeOfEffect(
      {required int soundId, required int volume}) async {
    final Map<String, dynamic> args = {
      "soundId": soundId.toString(),
      "volume": volume.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setVolumeOfEffect", args);
  }

  /// Binds the local video to the view described by [canvas]; the uid and
  /// platform view id (`xcomponentId`) are passed separately alongside the
  /// full canvas JSON.
  @override
  Future<void> setupLocalVideo(VideoCanvas canvas) async {
    print("agora_rtc_ng flutter setupLocalVideo ${canvas.toJson()}");
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setupLocalVideo", {
      "uid": canvas.uid,
      "xcomponentId": canvas.xcomponentId,
      "canvas": jsonEncode(canvas.toJson())
    });
  }

  /// Binds a remote user's video to the view described by [canvas]; the uid
  /// and platform view id (`xcomponentId`) are passed separately alongside
  /// the full canvas JSON.
  @override
  Future<void> setupRemoteVideo(VideoCanvas canvas) async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_setupRemoteVideo", {
      "uid": canvas.uid,
      "xcomponentId": canvas.xcomponentId,
      "canvas": jsonEncode(canvas.toJson())
    });
  }

  /// Stub: no-op — audio frame dumping is not forwarded to the platform side.
  @override
  Future<void> startAudioFrameDump(
      {required String channelId,
      required int uid,
      required String location,
      required String uuid,
      required String passwd,
      required int durationMs,
      required bool autoUpload}) async {}

  /// Starts playing the music file at [filePath]; [loopback] keeps it local,
  /// [cycle] is the loop count and [startPos] the start offset.
  @override
  Future<void> startAudioMixing(
      {required String filePath,
      required bool loopback,
      required int cycle,
      int startPos = 0}) async {
    final Map<String, dynamic> args = {
      "filePath": filePath,
      "loopback": loopback,
      "cycle": cycle.toString(),
      "startPos": startPos.toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_startAudioMixing", args);
  }

  /// Starts client audio recording using [config].
  ///
  /// NOTE(review): only `filePath` and `quality` are forwarded — the other
  /// fields of [AudioRecordingConfiguration] (sample rate, channels, codec)
  /// are dropped. Confirm the native side only needs these two.
  @override
  Future<void> startAudioRecording(AudioRecordingConfiguration config) async {
    final Map<String, dynamic> args = {
      "filePath": config.filePath,
      // Fall back to 0 (low quality) when quality is unset.
      "quality": (config.quality?.value() ?? 0).toString(),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startAudioRecording", args);
  }

  /// Starts camera capture for [sourceType] with the given [config].
  @override
  Future<void> startCameraCapture(
      {required VideoSourceType sourceType,
      required CameraCapturerConfiguration config}) async {
    final Map<String, dynamic> args = {
      "sourceType": sourceType.value().toString(),
      "config": jsonEncode(config.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startCameraCapture", args);
  }

  /// Stub: no-op — direct CDN streaming is not forwarded to the platform side
  /// and [eventHandler] is never registered, so its callbacks will not fire.
  @override
  Future<void> startDirectCdnStreaming(
      {required DirectCdnStreamingEventHandler eventHandler,
      required String publishUrl,
      required DirectCdnStreamingMediaOptions options}) async {}

  /// Stub: no-op — the echo test is not forwarded to the platform side.
  @override
  Future<void> startEchoTest(EchoTestConfiguration config) async {}

  /// Starts the last-mile network probe test with [config] (sent as JSON).
  @override
  Future<void> startLastmileProbeTest(LastmileProbeConfig config) async {
    final Map<String, dynamic> args = {
      "config": jsonEncode(config.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startLastmileProbeTest", args);
  }

  /// Stub: no-op — the local audio mixer is not forwarded to the platform
  /// side.
  @override
  Future<void> startLocalAudioMixer(
      LocalAudioMixerConfiguration config) async {}

  /// Starts the local video transcoder with [config].
  ///
  /// NOTE(review): the key rename below textually patches the serialized JSON
  /// ("videoInputStreams" -> "transcodingVideoStreams") to match what the
  /// platform side expects. This is fragile — it would corrupt any string
  /// VALUE containing "videoInputStreams" — but the key is the first
  /// occurrence in practice; prefer renaming in the serializer itself.
  @override
  Future<void> startLocalVideoTranscoder(
      LocalTranscoderConfiguration config) async {
    var json = jsonEncode(config.toJson());
    json = json.replaceFirst("videoInputStreams", "transcodingVideoStreams");
    final Map<String, dynamic> args = {
      "config": json,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startLocalVideoTranscoder", args);
  }

  /// Stub: no-op — media rendering tracing is not forwarded to the platform
  /// side.
  @override
  Future<void> startMediaRenderingTracing() async {}

  /// Starts or updates cross-channel media relay with [configuration]
  /// (serialized as JSON under the "config" key).
  @override
  Future<void> startOrUpdateChannelMediaRelay(
      ChannelMediaRelayConfiguration configuration) async {
    final Map<String, dynamic> args = {
      "config": jsonEncode(configuration.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startOrUpdateChannelMediaRelay", args);
  }

  /// Starts the local video preview for [sourceType].
  @override
  Future<void> startPreview(
      {VideoSourceType sourceType =
          VideoSourceType.videoSourceCameraPrimary}) async {
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod("RtcEngine_startPreview",
        {"sourceType": sourceType.value().toString()});
  }

  /// Stub: no-op — use [startPreview] instead; this variant makes no platform
  /// call on this backend.
  @override
  Future<void> startPreviewWithoutSourceType() async {}

  /// Stub: no-op — the rhythm player is not forwarded to the platform side.
  @override
  Future<void> startRhythmPlayer(
      {required String sound1,
      required String sound2,
      required AgoraRhythmPlayerConfig config}) async {}

  /// Starts RTMP streaming to [url] with the given [transcoding] settings.
  @override
  Future<void> startRtmpStreamWithTranscoding(
      {required String url, required LiveTranscoding transcoding}) async {
    final Map<String, dynamic> args = {
      "url": url,
      "transcoding": jsonEncode(transcoding.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startRtmpStreamWithTranscoding", args);
  }

  /// Starts RTMP streaming to [url] without transcoding.
  @override
  Future<void> startRtmpStreamWithoutTranscoding(String url) async {
    final Map<String, dynamic> args = {
      "url": url,
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startRtmpStreamWithoutTranscoding", args);
  }

  /// Starts screen capture with [captureParams] (serialized as JSON).
  @override
  Future<void> startScreenCapture(
      ScreenCaptureParameters2 captureParams) async {
    final Map<String, dynamic> args = {
      "captureParams": jsonEncode(captureParams.toJson()),
    };
    // Await so platform errors propagate to the caller (previously dropped).
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startScreenCapture", args);
  }

  /// No-op in this implementation; display-id screen capture is ignored.
  @override
  Future<void> startScreenCaptureByDisplayId(
      {required int displayId,
      required Rectangle regionRect,
      required ScreenCaptureParameters captureParams}) async {}

  /// No-op in this implementation; screen-rect capture is ignored.
  @override
  Future<void> startScreenCaptureByScreenRect(
      {required Rectangle screenRect,
      required Rectangle regionRect,
      required ScreenCaptureParameters captureParams}) async {}

  /// No-op in this implementation; source-typed screen capture is ignored.
  @override
  Future<void> startScreenCaptureBySourceType(
      {required VideoSourceType sourceType,
      required ScreenCaptureConfiguration config}) async {}

  /// No-op in this implementation; window-id screen capture is ignored.
  @override
  Future<void> startScreenCaptureByWindowId(
      {required int windowId,
      required Rectangle regionRect,
      required ScreenCaptureParameters captureParams}) async {}

  /// Stops playback of all audio effects via the native
  /// `RtcEngine_stopAllEffects` method.
  @override
  Future<void> stopAllEffects() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopAllEffects", args);
  }

  /// No-op in this implementation; the audio frame dump request is ignored.
  @override
  Future<void> stopAudioFrameDump(
      {required String channelId,
      required int uid,
      required String location}) async {}

  /// Stops audio-mixing playback via the native `RtcEngine_stopAudioMixing`
  /// method.
  @override
  Future<void> stopAudioMixing() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopAudioMixing", args);
  }

  /// Stops the ongoing audio recording via the native
  /// `RtcEngine_stopAudioRecording` method.
  @override
  Future<void> stopAudioRecording() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_stopAudioRecording", args);
  }

  /// Stops camera capture for [sourceType] via the native
  /// `RtcEngine_stopCameraCapture` method.
  @override
  Future<void> stopCameraCapture(VideoSourceType sourceType) async {
    final Map<String, dynamic> args = {
      "sourceType": sourceType.value().toString()
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopCameraCapture", args);
  }

  /// Stops cross-channel media relay via the native
  /// `RtcEngine_stopChannelMediaRelay` method.
  @override
  Future<void> stopChannelMediaRelay() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_stopChannelMediaRelay", args);
  }

  /// No-op in this implementation; the CDN streaming stop request is ignored.
  @override
  Future<void> stopDirectCdnStreaming() async {}

  /// No-op in this implementation; the echo-test stop request is ignored.
  @override
  Future<void> stopEchoTest() async {}

  /// Stops the audio effect identified by [soundId] via the native
  /// `RtcEngine_stopEffect` method.
  @override
  Future<void> stopEffect(int soundId) async {
    final Map<String, dynamic> args = {"soundId": soundId.toString()};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopEffect", args);
  }

  /// Stops the last-mile network probe test via the native
  /// `RtcEngine_stopLastmileProbeTest` method.
  @override
  Future<void> stopLastmileProbeTest() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_stopLastmileProbeTest", args);
  }

  /// No-op in this implementation; the local audio mixer stop is ignored.
  @override
  Future<void> stopLocalAudioMixer() async {}

  /// Stops the local video transcoder via the native
  /// `RtcEngine_stopLocalVideoTranscoder` method.
  @override
  Future<void> stopLocalVideoTranscoder() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_stopLocalVideoTranscoder", args);
  }

  /// Stops the local video preview for [sourceType] via the native
  /// `RtcEngine_stopPreview` method.
  @override
  Future<void> stopPreview(
      {VideoSourceType sourceType =
          VideoSourceType.videoSourceCameraPrimary}) async {
    final Map<String, dynamic> args = {
      "sourceType": sourceType.value().toString()
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopPreview", args);
  }

  /// No-op in this implementation; the rhythm player stop is ignored.
  @override
  Future<void> stopRhythmPlayer() async {}

  /// Stops the RTMP stream addressed by [url] via the native
  /// `RtcEngine_stopRtmpStream` method.
  @override
  Future<void> stopRtmpStream(String url) async {
    final Map<String, dynamic> args = {"url": url};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopRtmpStream", args);
  }

  /// Stops screen capture via the native `RtcEngine_stopScreenCapture` method.
  @override
  Future<void> stopScreenCapture() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_stopScreenCapture", args);
  }

  /// No-op in this implementation; the typed screen-capture stop is ignored.
  @override
  Future<void> stopScreenCaptureBySourceType(
      VideoSourceType sourceType) async {}

  /// Switches between front and rear cameras via the native
  /// `RtcEngine_switchCamera` method.
  @override
  Future<void> switchCamera() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_switchCamera", args);
  }

  /// Captures a snapshot of the video stream of [uid] to [filePath] via the
  /// native `RtcEngine_takeSnapshot` method.
  @override
  Future<void> takeSnapshot(
      {required int uid, required String filePath}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "filePath": filePath,
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_takeSnapshot", args);
  }

  /// No-op in this implementation; the configured snapshot request is ignored.
  @override
  Future<void> takeSnapshotWithConfig(
      {required int uid, required SnapshotConfig config}) async {}

  /// Releases all preloaded audio effects via the native
  /// `RtcEngine_unloadAllEffects` method.
  @override
  Future<void> unloadAllEffects() async {
    final Map<String, dynamic> args = {};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_unloadAllEffects", args);
  }

  /// Releases the preloaded audio effect identified by [soundId] via the
  /// native `RtcEngine_unloadEffect` method.
  @override
  Future<void> unloadEffect(int soundId) async {
    final Map<String, dynamic> args = {"soundId": soundId.toString()};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_unloadEffect", args);
  }

  /// No-op in this implementation; [observer] is never unregistered because
  /// registration itself is not forwarded here.
  @override
  void unregisterAudioEncodedFrameObserver(
      AudioEncodedFrameObserver observer) {}

  /// No-op in this implementation; [observer] is ignored.
  @override
  void unregisterAudioSpectrumObserver(AudioSpectrumObserver observer) {}

  /// Removes [eventHandler] from the locally registered handler list so it no
  /// longer receives engine events. Removing a handler that was never
  /// registered is a harmless no-op ([List.remove] semantics).
  @override
  void unregisterEventHandler(RtcEngineEventHandler eventHandler) {
    _eventHandler.remove(eventHandler);
  }

  /// Not yet implemented: the metadata observer is left registered.
  @override
  void unregisterMediaMetadataObserver(
      {required MetadataObserver observer, required MetadataType type}) {
    // TODO: implement unregisterMediaMetadataObserver
  }

  /// Updates the channel media [options] via the native
  /// `RtcEngine_updateChannelMediaOptions` method.
  @override
  Future<void> updateChannelMediaOptions(ChannelMediaOptions options) async {
    final Map<String, dynamic> args = {"options": jsonEncode(options.toJson())};
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_updateChannelMediaOptions", args);
  }

  /// No-op in this implementation; CDN streaming options are ignored.
  @override
  Future<void> updateDirectCdnStreamingMediaOptions(
      DirectCdnStreamingMediaOptions options) async {}

  /// No-op in this implementation; the local audio mixer config is ignored.
  @override
  Future<void> updateLocalAudioMixerConfiguration(
      LocalAudioMixerConfiguration config) async {}

  /// Updates the local video transcoder with [config] via the native
  /// `RtcEngine_updateLocalTranscoderConfiguration` method.
  @override
  Future<void> updateLocalTranscoderConfiguration(
      LocalTranscoderConfiguration config) async {
    var json = jsonEncode(config.toJson());
    // Rename the Dart-side "videoInputStreams" key to the key the native side
    // expects. NOTE(review): this is a string-level replaceFirst on the JSON;
    // it assumes the substring appears nowhere else in the payload — confirm.
    json = json.replaceFirst("videoInputStreams", "transcodingVideoStreams");
    final Map<String, dynamic> args = {
      "config": json,
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_updateLocalTranscoderConfiguration", args);
  }

  /// No-op in this implementation; [token] is ignored.
  @override
  Future<void> updatePreloadChannelToken(String token) async {}

  /// Updates the RTMP [transcoding] settings via the native
  /// `RtcEngine_updateRtmpTranscoding` method.
  @override
  Future<void> updateRtmpTranscoding(LiveTranscoding transcoding) async {
    final Map<String, dynamic> args = {
      "transcoding": jsonEncode(transcoding.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_updateRtmpTranscoding", args);
  }

  /// No-op in this implementation; the screen-capture update is ignored.
  @override
  Future<void> updateScreenCapture(
      ScreenCaptureParameters2 captureParams) async {}

  /// Updates the screen-capture [captureParams] via the native
  /// `RtcEngine_updateScreenCaptureParameters` method.
  @override
  Future<void> updateScreenCaptureParameters(
      ScreenCaptureParameters captureParams) async {
    final Map<String, dynamic> args = {
      "captureParams": jsonEncode(captureParams.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_updateScreenCaptureParameters", args);
  }

  /// No-op in this implementation; [regionRect] is ignored.
  @override
  Future<void> updateScreenCaptureRegion(Rectangle regionRect) async {}

  /// Stub: log upload is not implemented here; always resolves to an empty
  /// string instead of a request id.
  @override
  Future<String> uploadLogFile() {
    return Future.value("");
  }

  /// No-op in this implementation; [level] and [fmt] are ignored.
  @override
  Future<void> writeLog({required LogLevel level, required String fmt}) async {}

  /*---------------------------------------------RtcEngineEx----------------------------------------------------------------------------------------------*/

  /// No-op in this implementation; the watermark request is ignored.
  @override
  Future<void> addVideoWatermarkEx(
      {required String watermarkUrl,
      required WatermarkOptions options,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the volume adjustment is ignored.
  @override
  Future<void> adjustRecordingSignalVolumeEx(
      {required int volume, required RtcConnection connection}) async {}

  /// Adjusts the playback volume of remote user [uid] on [connection] via the
  /// native `RtcEngine_adjustUserPlaybackSignalVolumeEx` method.
  @override
  Future<void> adjustUserPlaybackSignalVolumeEx(
      {required int uid,
      required int volume,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "volume": volume.toString(),
      "connection": jsonEncode(connection.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_adjustUserPlaybackSignalVolumeEx", args);
  }

  /// No-op in this implementation; the watermark clear request is ignored.
  @override
  Future<void> clearVideoWatermarkEx(RtcConnection connection) async {}

  /// Creates a data stream on [connection] with [config] via the native
  /// `RtcEngine_createDataStreamEx` method.
  ///
  /// Returns the new stream id, or 0 if the platform returns no value.
  @override
  Future<int> createDataStreamEx(
      {required DataStreamConfig config,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "config": jsonEncode(config.toJson()),
      "connection": jsonEncode(connection.toJson())
    };
    // async/await instead of .then() for readability; same result.
    final streamId = await engineMethodChannel.invokeMethod<int>(
        "RtcEngine_createDataStreamEx", args);
    return streamId ?? 0;
  }

  /// Enables periodic volume indication callbacks on [connection] via the
  /// native `RtcEngine_enableAudioVolumeIndicationEx` method.
  @override
  Future<void> enableAudioVolumeIndicationEx(
      {required int interval,
      required int smooth,
      required bool reportVad,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "interval": interval.toString(),
      "smooth": smooth.toString(),
      "reportVad": reportVad,
      "connection": jsonEncode(connection.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableAudioVolumeIndicationEx", args);
  }

  /// No-op in this implementation; content inspection is ignored.
  @override
  Future<void> enableContentInspectEx(
      {required bool enabled,
      required ContentInspectConfig config,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; dual-stream mode is ignored.
  @override
  Future<void> enableDualStreamModeEx(
      {required bool enabled,
      required SimulcastStreamConfig streamConfig,
      required RtcConnection connection}) async {}

  /// Enables or disables media encryption on [connection] with [config] via
  /// the native `RtcEngine_enableEncryptionEx` method.
  @override
  Future<void> enableEncryptionEx(
      {required RtcConnection connection,
      required bool enabled,
      required EncryptionConfig config}) async {
    final Map<String, dynamic> args = {
      "enabled": enabled,
      "connection": jsonEncode(connection.toJson()),
      "config": jsonEncode(config.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_enableEncryptionEx", args);
  }

  /// No-op in this implementation; loopback recording is ignored.
  @override
  Future<void> enableLoopbackRecordingEx(
      {required RtcConnection connection,
      required bool enabled,
      String? deviceName}) async {}

  /// Fetches the current call id for [connection] via the native
  /// `RtcEngine_getCallIdEx` method.
  ///
  /// Returns an empty string if the platform provides no value.
  @override
  Future<String> getCallIdEx(RtcConnection connection) async {
    final Map<String, dynamic> args = {
      "connection": jsonEncode(connection.toJson()),
    };
    // async/await instead of .then() for readability; same result.
    final callId = await engineMethodChannel.invokeMethod<String>(
        "RtcEngine_getCallIdEx", args);
    return callId ?? "";
  }

  /// Stub: does not query the native layer; always reports
  /// [ConnectionStateType.connectionStateConnected] regardless of
  /// [connection].
  @override
  Future<ConnectionStateType> getConnectionStateEx(RtcConnection connection) {
    return Future.value(ConnectionStateType.connectionStateConnected);
  }

  /// Looks up the [UserInfo] for [uid] on [connection] via the native
  /// `RtcEngine_getUserInfoByUidEx` method.
  @override
  Future<UserInfo> getUserInfoByUidEx(
      {required int uid, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "connection": jsonEncode(connection.toJson()),
    };
    final value = await engineMethodChannel.invokeMethod<String>(
        "RtcEngine_getUserInfoByUidEx", args);
    // Decode into a typed UserInfo. The previous jsonDecode(value ?? "")
    // threw a FormatException on a null result and returned an untyped Map,
    // which failed the implicit cast to UserInfo at runtime.
    return UserInfo.fromJson(
        jsonDecode(value ?? "{}") as Map<String, dynamic>);
  }

  /// Looks up the [UserInfo] for [userAccount] on [connection] via the native
  /// `RtcEngine_getUserInfoByUserAccountEx` method.
  @override
  Future<UserInfo> getUserInfoByUserAccountEx(
      {required String userAccount, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "userAccount": userAccount,
      "connection": jsonEncode(connection.toJson()),
    };
    final value = await engineMethodChannel.invokeMethod<String>(
        "RtcEngine_getUserInfoByUserAccountEx", args);
    // Decode into a typed UserInfo. The previous jsonDecode(value ?? "")
    // threw a FormatException on a null result and returned an untyped Map,
    // which failed the implicit cast to UserInfo at runtime.
    return UserInfo.fromJson(
        jsonDecode(value ?? "{}") as Map<String, dynamic>);
  }

  /// Joins the channel described by [connection] with [token] and [options]
  /// via the native `RtcEngine_joinChannelEx` method.
  @override
  Future<void> joinChannelEx(
      {required String token,
      required RtcConnection connection,
      required ChannelMediaOptions options}) async {
    final Map<String, dynamic> args = {
      "token": token,
      "connection": jsonEncode(connection.toJson()),
      "options": jsonEncode(options.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_joinChannelEx", args);
  }

  /// Leaves the channel described by [connection] via the native
  /// `RtcEngine_leaveChannelEx` method.
  @override
  Future<void> leaveChannelEx(
      {required RtcConnection connection, LeaveChannelOptions? options}) async {
    final Map<String, dynamic> args = {
      "connection": jsonEncode(connection.toJson()),
      // An empty string signals "no options" to the native side.
      "options": options != null ? jsonEncode(options.toJson()) : ""
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_leaveChannelEx", args);
  }

  /// No-op in this implementation; the leave request is ignored.
  @override
  Future<void> leaveChannelWithUserAccountEx(
      {required String channelId,
      required String userAccount,
      LeaveChannelOptions? options}) async {}

  /// Mutes or unmutes all remote audio streams on [connection] via the native
  /// `RtcEngine_muteAllRemoteAudioStreamsEx` method.
  @override
  Future<void> muteAllRemoteAudioStreamsEx(
      {required bool mute, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "mute": mute,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteAllRemoteAudioStreamsEx", args);
  }

  /// Mutes or unmutes all remote video streams on [connection] via the native
  /// `RtcEngine_muteAllRemoteVideoStreamsEx` method.
  @override
  Future<void> muteAllRemoteVideoStreamsEx(
      {required bool mute, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "mute": mute,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteAllRemoteVideoStreamsEx", args);
  }

  /// Mutes or unmutes the local audio stream on [connection] via the native
  /// `RtcEngine_muteLocalAudioStreamEx` method.
  @override
  Future<void> muteLocalAudioStreamEx(
      {required bool mute, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "mute": mute,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteLocalAudioStreamEx", args);
  }

  /// Mutes or unmutes the local video stream on [connection] via the native
  /// `RtcEngine_muteLocalVideoStreamEx` method.
  @override
  Future<void> muteLocalVideoStreamEx(
      {required bool mute, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "mute": mute,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteLocalVideoStreamEx", args);
  }

  /// No-op in this implementation; the recording-signal mute is ignored.
  @override
  Future<void> muteRecordingSignalEx(
      {required bool mute, required RtcConnection connection}) async {}

  /// Mutes or unmutes the audio stream of remote user [uid] on [connection]
  /// via the native `RtcEngine_muteRemoteAudioStreamEx` method.
  @override
  Future<void> muteRemoteAudioStreamEx(
      {required int uid,
      required bool mute,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "mute": mute,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteRemoteAudioStreamEx", args);
  }

  /// Mutes or unmutes the video stream of remote user [uid] on [connection]
  /// via the native `RtcEngine_muteRemoteVideoStreamEx` method.
  @override
  Future<void> muteRemoteVideoStreamEx(
      {required int uid,
      required bool mute,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "mute": mute,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_muteRemoteVideoStreamEx", args);
  }

  /// No-op in this implementation; the relay pause is ignored.
  @override
  Future<void> pauseAllChannelMediaRelayEx(RtcConnection connection) async {}

  /// No-op in this implementation; the relay resume is ignored.
  @override
  Future<void> resumeAllChannelMediaRelayEx(RtcConnection connection) async {}

  /// No-op in this implementation; the audio metadata is discarded.
  @override
  Future<void> sendAudioMetadataEx(
      {required RtcConnection connection,
      required Uint8List metadata,
      required int length}) async {}

  /// No-op in this implementation; the custom report is discarded.
  @override
  Future<void> sendCustomReportMessageEx(
      {required String id,
      required String category,
      required String event,
      required String label,
      required int value,
      required RtcConnection connection}) async {}

  /// Sends [data] over data stream [streamId] on [connection] via the native
  /// `RtcEngine_sendStreamMessageEx` method.
  @override
  Future<void> sendStreamMessageEx(
      {required int streamId,
      required Uint8List data,
      required int length,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "streamId": streamId.toString(),
      "length": length.toString(),
      // The binary payload is passed through the channel as-is.
      "data": data,
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_sendStreamMessageEx", args);
  }

  /// Sets the simulcast [mode] and [streamConfig] on [connection] via the
  /// native `RtcEngine_setDualStreamModeEx` method.
  @override
  Future<void> setDualStreamModeEx(
      {required SimulcastStreamMode mode,
      required SimulcastStreamConfig streamConfig,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "mode": mode.value().toString(),
      "streamConfig": jsonEncode(streamConfig.toJson()),
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setDualStreamModeEx", args);
  }

  /// No-op in this implementation; the priority list is ignored.
  @override
  Future<void> setHighPriorityUserListEx(
      {required List<int> uidList,
      required int uidNum,
      required StreamFallbackOptions option,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; [parameters] is ignored.
  @override
  Future<void> setParametersEx(
      {required RtcConnection connection, required String parameters}) async {}

  /// Sets render and mirror modes for remote user [uid] on [connection] via
  /// the native `RtcEngine_setRemoteRenderModeEx` method.
  @override
  Future<void> setRemoteRenderModeEx(
      {required int uid,
      required RenderModeType renderMode,
      required VideoMirrorModeType mirrorMode,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "renderMode": renderMode.value().toString(),
      "mirrorMode": mirrorMode.value().toString(),
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setRemoteRenderModeEx", args);
  }

  /// No-op in this implementation; spatial audio params are ignored.
  @override
  Future<void> setRemoteUserSpatialAudioParamsEx(
      {required int uid,
      required SpatialAudioParams params,
      required RtcConnection connection}) async {}

  /// Selects the [streamType] (high/low) to subscribe to for remote user
  /// [uid] on [connection] via the native
  /// `RtcEngine_setRemoteVideoStreamTypeEx` method.
  @override
  Future<void> setRemoteVideoStreamTypeEx(
      {required int uid,
      required VideoStreamType streamType,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "uid": uid.toString(),
      "streamType": streamType.value().toString(),
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setRemoteVideoStreamTypeEx", args);
  }

  /// No-op in this implementation; subscription options are ignored.
  @override
  Future<void> setRemoteVideoSubscriptionOptionsEx(
      {required int uid,
      required VideoSubscriptionOptions options,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the voice position is ignored.
  @override
  Future<void> setRemoteVoicePositionEx(
      {required int uid,
      required double pan,
      required double gain,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the simulcast config is ignored.
  @override
  Future<void> setSimulcastConfigEx(
      {required SimulcastConfig simulcastConfig,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the audio allowlist is ignored.
  @override
  Future<void> setSubscribeAudioAllowlistEx(
      {required List<int> uidList,
      required int uidNumber,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the audio blocklist is ignored.
  @override
  Future<void> setSubscribeAudioBlocklistEx(
      {required List<int> uidList,
      required int uidNumber,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the video allowlist is ignored.
  @override
  Future<void> setSubscribeVideoAllowlistEx(
      {required List<int> uidList,
      required int uidNumber,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the video blocklist is ignored.
  @override
  Future<void> setSubscribeVideoBlocklistEx(
      {required List<int> uidList,
      required int uidNumber,
      required RtcConnection connection}) async {}

  /// Applies the video encoder [config] on [connection] via the native
  /// `RtcEngine_setVideoEncoderConfigurationEx` method.
  @override
  Future<void> setVideoEncoderConfigurationEx(
      {required VideoEncoderConfiguration config,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "config": jsonEncode(config.toJson()),
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_setVideoEncoderConfigurationEx", args);
  }

  /// Binds the remote video of [canvas] on [connection] to its render target
  /// via the native `RtcEngine_setupRemoteVideoEx` method.
  @override
  Future<void> setupRemoteVideoEx(
      {required VideoCanvas canvas, required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      // uid and xcomponentId are also passed top-level for the native side,
      // in addition to the full serialized canvas.
      "uid": canvas.uid,
      "xcomponentId": canvas.xcomponentId,
      "connection": jsonEncode(connection.toJson()),
      "canvas": jsonEncode(canvas.toJson())
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_setupRemoteVideoEx", args);
  }

  /// No-op in this implementation; the tracing request is ignored.
  @override
  Future<void> startMediaRenderingTracingEx(RtcConnection connection) async {}

  /// Starts (or updates) cross-channel media relay for [connection] with
  /// [configuration] via the native
  /// `RtcEngine_startOrUpdateChannelMediaRelayEx` method.
  @override
  Future<void> startOrUpdateChannelMediaRelayEx(
      {required ChannelMediaRelayConfiguration configuration,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "configuration": jsonEncode(configuration.toJson()),
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_startOrUpdateChannelMediaRelayEx", args);
  }

  /// No-op in this implementation; the transcoded RTMP start is ignored.
  @override
  Future<void> startRtmpStreamWithTranscodingEx(
      {required String url,
      required LiveTranscoding transcoding,
      required RtcConnection connection}) async {}

  /// No-op in this implementation; the plain RTMP start is ignored.
  @override
  Future<void> startRtmpStreamWithoutTranscodingEx(
      {required String url, required RtcConnection connection}) async {}

  /// Stops cross-channel media relay for [connection] via the native
  /// `RtcEngine_stopChannelMediaRelayEx` method.
  @override
  Future<void> stopChannelMediaRelayEx(RtcConnection connection) async {
    final Map<String, dynamic> args = {
      "connection": jsonEncode(connection.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_stopChannelMediaRelayEx", args);
  }

  /// No-op in this implementation; the RTMP stop request is ignored.
  @override
  Future<void> stopRtmpStreamEx(
      {required String url, required RtcConnection connection}) async {}

  /// Captures a snapshot of remote user [uid] on [connection] to [filePath]
  /// via the native `RtcEngine_takeSnapshotEx` method.
  @override
  Future<void> takeSnapshotEx(
      {required RtcConnection connection,
      required int uid,
      required String filePath}) async {
    final Map<String, dynamic> args = {
      "connection": jsonEncode(connection.toJson()),
      "uid": uid.toString(),
      "filePath": filePath,
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod("RtcEngine_takeSnapshotEx", args);
  }

  /// No-op in this implementation; the configured snapshot is ignored.
  @override
  Future<void> takeSnapshotWithConfigEx(
      {required RtcConnection connection,
      required int uid,
      required SnapshotConfig config}) async {}

  /// Updates channel media [options] for [connection] via the native
  /// `RtcEngine_updateChannelMediaOptionsEx` method.
  @override
  Future<void> updateChannelMediaOptionsEx(
      {required ChannelMediaOptions options,
      required RtcConnection connection}) async {
    final Map<String, dynamic> args = {
      "connection": jsonEncode(connection.toJson()),
      "options": jsonEncode(options.toJson()),
    };
    // Await so platform errors propagate instead of being dropped.
    await engineMethodChannel.invokeMethod(
        "RtcEngine_updateChannelMediaOptionsEx", args);
  }

  /// No-op in this implementation; the transcoding update is ignored.
  @override
  Future<void> updateRtmpTranscodingEx(
      {required LiveTranscoding transcoding,
      required RtcConnection connection}) async {}
}
