import 'dart:convert';
import 'dart:io';

import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:sc1_manage_tool/util/my_log.dart';

/// The uri for webrtc, for example, [FlutterLive.rtc]:
///   webrtc://d.ossrs.net:11985/live/livestream
/// is parsed as a WebRTCUri:
///   api: http://d.ossrs.net:11985/rtc/v1/play/
///   streamUrl: "webrtc://d.ossrs.net:11985/live/livestream"
class WebRTCUri {
  /// The api server url for WebRTC streaming.
  String? api;

  /// The stream url to play or publish.
  String? streamUrl;

  /// Parses [url] into a [WebRTCUri] per https://github.com/rtcdn/rtcdn-draft
  ///
  /// Scheme: the `schema` query parameter wins if present; otherwise the
  /// default is 'http'. (The previous code assigned 'https' first, but that
  /// value was unconditionally overwritten — 'http' was always the effective
  /// default, and that behavior is kept.)
  /// Port: an explicit port in [url] wins; otherwise 443 for https and 1985
  /// (the SRS default HTTP API port) for anything else.
  /// API path: '/rtc/v1/play/' unless overridden by the `play` query
  /// parameter. All remaining query parameters except `api`, `play` and
  /// `schema` are forwarded to the API url.
  static WebRTCUri parse(String url) {
    final uri = Uri.parse(url);

    // 'schema' query parameter selects http/https; defaults to http.
    final schema = uri.queryParameters['schema'] ?? 'http';

    // Explicit port wins; otherwise use the scheme's conventional API port.
    final int port;
    if (uri.port > 0) {
      port = uri.port;
    } else {
      port = (schema == 'http') ? 1985 : 443;
    }

    // API path, overridable via the 'play' query parameter.
    final api = uri.queryParameters['play'] ?? '/rtc/v1/play/';

    // Forward any extra query parameters to the API endpoint.
    final apiParams = <String>[
      for (final entry in uri.queryParameters.entries)
        if (entry.key != 'api' && entry.key != 'play' && entry.key != 'schema')
          '${entry.key}=${entry.value}',
    ];

    var apiUrl = '$schema://${uri.host}:$port$api';
    if (apiParams.isNotEmpty) {
      apiUrl += '?${apiParams.join('&')}';
    }

    final r = WebRTCUri()
      ..api = apiUrl
      ..streamUrl = url;
    print('Url $url parsed to api=${r.api}, stream=${r.streamUrl}');
    return r;
  }
}

typedef AddStreamCallback = void Function(MediaStream stream);

/// A pull-only WebRTC player that signals via the rtcdn-draft HTTP API
/// (see [WebRTCUri]) and exposes the remote stream through [onRemoteStream].
class WebRTCPlayer {
  WebRTCPlayer({
    this.onLocalConnectionState,
    this.onSignalingState,
  });

  /// Tag used for log messages.
  String tag = "WebRTCPlayer";

  /// When true, [debugLog] forwards messages to [MyLog.i].
  bool debug = false;

  RTCPeerConnection? _localPeerConnection;
  AddStreamCallback? _onRemoteStream;
  // Local media state; only used by the (currently disabled) publish path
  // below and released in [dispose].
  MediaStream? _localStream;
  RTCRtpSender? _videoSender;
  RTCRtpSender? _audioSender;

  /// Callback for RTC peer-connection state changes.
  Function(RTCPeerConnectionState state)? onLocalConnectionState;

  /// Callback for RTC signaling state changes.
  Function(RTCSignalingState state)? onSignalingState;

  List<MediaDeviceInfo>? _mediaDevicesList;

  final _configuration = <String, dynamic>{
    'iceServers': [
      {'urls': 'stun:stun.l.google.com:19302'},
    ],
    'sdpSemantics': 'unified-plan',
    'encodedInsertableStreams': true,
  };

  final _constraints = <String, dynamic>{
    'mandatory': {},
    'optional': [
      {'DtlsSrtpKeyAgreement': false},
    ],
  };

  // Map<String, dynamic> _getMediaConstraints({audio = true, video = true}) {
  //   return {
  //     'audio': audio ? true : false,
  //     'video': video
  //         ? {
  //             'mandatory': {
  //               'minWidth': '640',
  //               'minHeight': '480',
  //               'minFrameRate': '30',
  //             },
  //             'facingMode': 'user',
  //             'optional': [],
  //           }
  //         : false,
  //   };
  // }

  /// When got a remote stream.
  set onRemoteStream(AddStreamCallback v) {
    _onRemoteStream = v;
  }

  /// Initialize the player: snapshot the media device list and keep it
  /// updated on device hot-plug.
  ///
  /// Returns a [Future] (instead of the previous fire-and-forget `void`)
  /// so callers can await completion and observe enumeration errors.
  Future<void> initState() async {
    _mediaDevicesList = await navigator.mediaDevices.enumerateDevices();
    debugLog("_mediaDevicesList:$_mediaDevicesList");
    navigator.mediaDevices.ondevicechange = (event) async {
      debugLog('++++++ ondevicechange ++++++');
      _mediaDevicesList = await navigator.mediaDevices.enumerateDevices();
    };
  }

  RTCPeerConnection? get localPeerConnection => _localPeerConnection;

  /// Start play a url.
  /// [url] must a path parsed by [WebRTCUri.parse] in https://github.com/rtcdn/rtcdn-draft
  Future<void> play(String url) async {
    // Close any previous connection; log (but ignore) close errors so a
    // failed teardown cannot block a new playback session.
    await _localPeerConnection
        ?.close()
        .onError((error, stackTrace) => MyLog.e(tag, "$error"));

    // Create the peer connection. A promoted local avoids the previous mix
    // of `?.` and `!` on the nullable field.
    final pc = await createPeerConnection(
      _configuration,
      _constraints,
    );
    _localPeerConnection = pc;

    debugLog('WebRTC: createPeerConnection done');

    // Hand the remote stream to the registered consumer.
    pc.onAddStream = (MediaStream stream) {
      debugLog('WebRTC: got stream ${stream.id}');
      final handler = _onRemoteStream;
      if (handler == null) {
        debugLog('Warning: Stream ${stream.id} is leak');
        return;
      }
      handler(stream);
    };

    pc.onConnectionState = onLocalConnectionState;
    pc.onSignalingState = onSignalingState;

    // Register receive-only transceivers for audio and video. These futures
    // were previously dropped un-awaited, racing with createOffer below;
    // await them so the offer is guaranteed to include both m-lines.
    await pc.addTransceiver(
      kind: RTCRtpMediaType.RTCRtpMediaTypeAudio,
      init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly),
    );
    await pc.addTransceiver(
      kind: RTCRtpMediaType.RTCRtpMediaTypeVideo,
      init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly),
    );
    debugLog('WebRTC: Setup PC done, A|V RecvOnly');

    // Start SDP handshake: local offer -> signaling server -> remote answer.
    final offer = await pc.createOffer({
      'mandatory': {'OfferToReceiveAudio': true, 'OfferToReceiveVideo': true},
      'optional': [],
    });
    await pc.setLocalDescription(offer);
    debugLog(
        'WebRTC: createOffer, ${offer.type} is ${offer.sdp?.replaceAll('\n', '\\n').replaceAll('\r', '\\r')}');

    final answer = await _handshake(url, offer.sdp!);
    debugLog(
        'WebRTC: got ${answer.type} is ${answer.sdp?.replaceAll('\n', '\\n').replaceAll('\r', '\\r')}');

    await pc.setRemoteDescription(answer);
  }

  // void _startVideo() async {
  //   var newStream = await navigator.mediaDevices
  //       .getUserMedia(_getMediaConstraints(audio: false, video: true));
  //   if (_localStream != null) {
  //     await _removeExistingVideoTrack();
  //     var tracks = newStream.getVideoTracks();
  //     for (var newTrack in tracks) {
  //       await _localStream!.addTrack(newTrack);
  //     }
  //   } else {
  //     _localStream = newStream;
  //   }
  //
  //   await _addOrReplaceVideoTracks();
  // }

  // void _startAudio() async {
  //   var newStream = await navigator.mediaDevices
  //       .getUserMedia(_getMediaConstraints(audio: true, video: false));
  //
  //   if (_localStream != null) {
  //     await _removeExistingAudioTrack();
  //     for (var newTrack in newStream.getAudioTracks()) {
  //       await _localStream!.addTrack(newTrack);
  //     }
  //   } else {
  //     _localStream = newStream;
  //   }
  //
  //   await _addOrReplaceAudioTracks();
  // }

  // Future<void> _removeExistingVideoTrack({bool fromConnection = false}) async {
  //   var tracks = _localStream!.getVideoTracks();
  //   for (var i = tracks.length - 1; i >= 0; i--) {
  //     var track = tracks[i];
  //     if (fromConnection) {
  //       await _connectionRemoveTrack(track);
  //     }
  //     await _localStream!.removeTrack(track);
  //     await track.stop();
  //   }
  // }

  // Future<void> _removeExistingAudioTrack({bool fromConnection = false}) async {
  //   var tracks = _localStream!.getAudioTracks();
  //   for (var i = tracks.length - 1; i >= 0; i--) {
  //     var track = tracks[i];
  //     if (fromConnection) {
  //       await _connectionRemoveTrack(track);
  //     }
  //     await _localStream!.removeTrack(track);
  //     await track.stop();
  //   }
  // }

  // Future<void> _addOrReplaceVideoTracks() async {
  //   for (var track in _localStream!.getVideoTracks()) {
  //     await _connectionAddTrack(track, _localStream!);
  //   }
  // }

  // Future<void> _addOrReplaceAudioTracks() async {
  //   for (var track in _localStream!.getAudioTracks()) {
  //     await _connectionAddTrack(track, _localStream!);
  //   }
  // }

  // Future<void> _connectionAddTrack(
  //     MediaStreamTrack track, MediaStream stream) async {
  //   var sender = track.kind == 'video' ? _videoSender : _audioSender;
  //   if (sender != null) {
  //     MyLog.i(tag, 'Have a Sender of kind:${track.kind}');
  //     var trans = await _getSendersTransceiver(sender.senderId);
  //     if (trans != null) {
  //       MyLog.i(tag, 'Setting direction and replacing track with new track');
  //       await trans.setDirection(TransceiverDirection.SendOnly);
  //       await trans.sender.replaceTrack(track);
  //     }
  //   } else {
  //     if (track.kind == 'video') {
  //       _videoSender = await _localPeerConnection!.addTrack(track, stream);
  //     } else {
  //       _audioSender = await _localPeerConnection!.addTrack(track, stream);
  //     }
  //   }
  // }

  // Future<void> _connectionRemoveTrack(MediaStreamTrack track) async {
  //   var sender = track.kind == 'video' ? _videoSender : _audioSender;
  //   if (sender != null) {
  //     MyLog.i(tag, 'Have a Sender of kind:${track.kind}');
  //     var trans = await _getSendersTransceiver(sender.senderId);
  //     if (trans != null) {
  //       MyLog.i(tag, 'Setting direction and replacing track with null');
  //       await trans.setDirection(TransceiverDirection.Inactive);
  //       await trans.sender.replaceTrack(null);
  //     }
  //   }
  // }

  // Future<RTCRtpTransceiver?> _getSendersTransceiver(String senderId) async {
  //   RTCRtpTransceiver? foundTrans;
  //   var trans = await _localPeerConnection!.getTransceivers();
  //   for (var tran in trans) {
  //     if (tran.sender.senderId == senderId) {
  //       foundTrans = tran;
  //       break;
  //     }
  //   }
  //   return foundTrans;
  // }

  /// Handshake to exchange SDP: POST the [offer] to the rtcdn API derived
  /// from [url] and return the remote answer.
  ///
  /// Completes with an error (the raw server reply) when the response lacks
  /// `code`/`sdp` or `code != 0`.
  Future<RTCSessionDescription> _handshake(String url, String offer) async {
    // Setup the client for HTTP or HTTPS.
    HttpClient client = HttpClient();

    try {
      // Allow self-sign certificate, see https://api.flutter.dev/flutter/dart-io/HttpClient/badCertificateCallback.html
      client.badCertificateCallback =
          (X509Certificate cert, String host, int port) => true;

      // Parsing the WebRTC uri form url.
      WebRTCUri uri = WebRTCUri.parse(url);

      // Do signaling for WebRTC.
      // @see https://github.com/rtcdn/rtcdn-draft
      //
      // POST http://d.ossrs.net:11985/rtc/v1/play/
      //    {api: "xxx", sdp: "offer", streamurl: "webrtc://d.ossrs.net:11985/live/livestream"}
      // Response:
      //    {code: 0, sdp: "answer", sessionid: "007r51l7:X2Lv"}
      HttpClientRequest req = await client.postUrl(Uri.parse(uri.api!));
      req.headers.set('Content-Type', 'application/json');
      req.add(utf8.encode(json
          .encode({'api': uri.api, 'streamurl': uri.streamUrl, 'sdp': offer})));
      debugLog('WebRTC request: ${uri.api} offer=${offer.length}B');

      HttpClientResponse res = await req.close();
      String reply = await res.transform(utf8.decoder).join();
      debugLog('WebRTC reply: ${reply.length}B, ${res.statusCode}');

      Map<String, dynamic> o = json.decode(reply);
      if (!o.containsKey('code') || !o.containsKey('sdp') || o['code'] != 0) {
        return Future.error(reply);
      }

      return Future.value(RTCSessionDescription(o['sdp'], 'answer'));
    } finally {
      // Always release the client, even on signaling failure.
      client.close();
    }
  }

  /// Dispose the player, releasing the local stream, peer connection and
  /// senders. Errors are logged and swallowed so teardown is best-effort.
  ///
  /// Returns a [Future] (instead of the previous fire-and-forget `void`)
  /// so callers can await the full teardown.
  Future<void> dispose() async {
    try {
      await _localStream?.dispose();
      await _localPeerConnection?.close();
      await _localPeerConnection?.dispose();
      await _videoSender?.dispose();
      await _audioSender?.dispose();
    } catch (e) {
      MyLog.e(tag, e.toString());
    }
  }

  /// Logs [msg] via [MyLog.i] when [debug] is enabled.
  void debugLog(String msg) {
    if (debug) MyLog.i(tag, msg);
  }
}
