import 'dart:async';
import 'dart:convert';

import 'package:flutter/cupertino.dart';
import 'package:flutter/services.dart';
import 'package:flutter_sunmi_uvc_camera/flutter_sunmi_uvc_camera.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';

/// Dart-side controller for the Sunmi UVC camera plugin.
///
/// Talks to the Android implementation over a [MethodChannel] and exposes
/// camera state, preview frames (as ML Kit [InputImage]s) and raw native
/// messages as broadcast streams.
class UVCCameraController {
  static const String _channelName = "flutter_sunmi_uvc_camera/channel";

  UVCCameraState _cameraState = UVCCameraState.closed;

  /// Invoked whenever the native side reports a camera state change.
  Function(UVCCameraState)? cameraStateCallback;

  /// Invoked with the saved file path after a successful picture capture.
  Function(String path)? clickTakePictureButtonCallback;

  /// The most recently reported camera state.
  UVCCameraState get getCameraState => _cameraState;

  String _cameraErrorMsg = '';

  /// The last error message received from the native side (empty if none).
  String get getCameraErrorMsg => _cameraErrorMsg;

  String _takePicturePath = '';

  /// The file path of the most recently taken picture (empty if none).
  String get getTakePicturePath => _takePicturePath;

  final StreamController<InputImage> _cameraStreamController =
      StreamController<InputImage>.broadcast();

  /// Preview frames converted to ML Kit [InputImage]s (see "onPreviewData").
  Stream<InputImage> get cameraStream => _cameraStreamController.stream;

  final StreamController<dynamic> _messageStreamController =
      StreamController<dynamic>.broadcast();

  /// Raw messages forwarded from the native side ("callFlutter" calls and
  /// error state strings).
  Stream<dynamic> get messageStream => _messageStreamController.stream;

  List<PreviewSize> _previewSizes = [];

  /// Preview sizes cached by the last [getAllPreviewSizes] call.
  List<PreviewSize> get getPreviewSizes => _previewSizes;

  MethodChannel? _cameraChannel;

  /// Creates the controller and starts listening on the method channel.
  UVCCameraController() {
    _cameraChannel = const MethodChannel(_channelName);
    _cameraChannel?.setMethodCallHandler(_methodChannelHandler);
    debugPrint("------> UVCCameraController init");
  }

  /// Releases the method-channel handler and closes the broadcast streams.
  ///
  /// The controller must not be used after calling this.
  void dispose() {
    _cameraChannel?.setMethodCallHandler(null);
    _cameraChannel = null;
    // Close the controllers so stream listeners complete and nothing leaks.
    // close() returns a Future we deliberately do not wait on.
    unawaited(_cameraStreamController.close());
    unawaited(_messageStreamController.close());
    debugPrint("------> UVCCameraController dispose");
  }

  /// Handles method calls arriving from the Android side.
  Future<void> _methodChannelHandler(MethodCall call) async {
    switch (call.method) {
      case "callFlutter":
        // isClosed guard: dispose() may race an in-flight native call.
        if (!_messageStreamController.isClosed) {
          _messageStreamController.sink.add(call.arguments);
        }
        break;
      case "takePictureSuccess":
        _takePictureSuccess(call.arguments);
        break;
      case "CameraState":
        _setCameraState(call.arguments.toString());
        break;
      case "onEncodeData":
        // Encoded H264 / AAC frames; currently ignored.
        break;
      case "onPreviewData":
        // Raw preview frame: {'format', 'width', 'height', 'data'}.
        try {
          final double width = double.parse('${call.arguments['width']}');
          final double height = double.parse('${call.arguments['height']}');
          final bytes = Uint8List.fromList(call.arguments['data']);
          final inputImage = InputImage.fromBytes(
            bytes: bytes,
            metadata: InputImageMetadata(
              size: Size(width, height),
              rotation: InputImageRotation.rotation0deg, // used only in Android
              format: InputImageFormat.nv21, // used only in iOS
              bytesPerRow: 0,
            ),
          );
          if (!_cameraStreamController.isClosed) {
            _cameraStreamController.sink.add(inputImage);
          }
        } catch (e) {
          // Best effort: drop malformed frames instead of crashing the handler.
          debugPrint('UVCCameraController.onPreviewData--->$e');
        }
        break;
    }
  }

  /// Asks the native side to initialize the camera subsystem.
  Future<void> initializeCamera() async {
    await _cameraChannel?.invokeMethod('initializeCamera');
  }

  /// Returns the native camera list (format defined by the platform side).
  Future getCameraList() async {
    var data = await _cameraChannel?.invokeMethod('getCameraList');
    return data;
  }

  /// Opens the UVC camera device.
  Future<void> openUVCCamera() async {
    debugPrint("openUVCCamera");
    await _cameraChannel?.invokeMethod('openUVCCamera');
  }

  /// Starts delivering encoded/preview frames from the native side.
  void captureStreamStart() {
    _cameraChannel?.invokeMethod('captureStreamStart');
  }

  /// Stops delivering encoded/preview frames.
  void captureStreamStop() {
    _cameraChannel?.invokeMethod('captureStreamStop');
  }

  /// Starts the camera preview.
  ///
  /// Returns a future so callers can await completion or handle
  /// platform-channel errors (previously declared `void`, which made the
  /// underlying future unobservable).
  Future<void> startCamera() async {
    await _cameraChannel?.invokeMethod('startCamera');
  }

  /// Switches to the next camera.
  Future<void> switchCamera() async {
    await _cameraChannel?.invokeMethod('switchCamera');
  }

  /// Fetches every preview size supported by the current camera.
  ///
  /// Caches the result in [getPreviewSizes] and returns it. Returns an
  /// empty list when the channel is unavailable or the native side sends
  /// no data (previously this crashed in `json.decode(null)`).
  Future<List<PreviewSize>> getAllPreviewSizes() async {
    final result = await _cameraChannel?.invokeMethod('getAllPreviewSizes');
    final List<PreviewSize> list = [];
    if (result != null) {
      json.decode(result)?.forEach((element) {
        list.add(PreviewSize.fromJson(element));
      });
    }
    _previewSizes = list;
    return list;
  }

  /// Returns the current camera request parameters as reported natively.
  Future<String?> getCurrentCameraRequestParameters() async {
    return await _cameraChannel
        ?.invokeMethod('getCurrentCameraRequestParameters');
  }

  /// Updates the preview resolution.
  void updateResolution(PreviewSize? previewSize) {
    _cameraChannel?.invokeMethod('updateResolution', previewSize?.toMap());
  }

  // ///Take a picture
  // Future<String?> takePicture() async {
  //   String? path = await _cameraChannel?.invokeMethod('takePicture');
  //   debugPrint("path: $path");
  //   return path;
  // }

  // ///Record video
  // Future<String?> captureVideo() async {
  //   String? path = await _cameraChannel?.invokeMethod('captureVideo');
  //   debugPrint("path: $path");
  //   return path;
  // }

  /// Maps a native state string onto [UVCCameraState] and fires callbacks.
  void _setCameraState(String state) {
    debugPrint("Camera: $state");
    switch (state) {
      case "OPENED":
        _cameraState = UVCCameraState.opened;
        cameraStateCallback?.call(UVCCameraState.opened);
        break;
      case "CLOSED":
        _cameraState = UVCCameraState.closed;
        cameraStateCallback?.call(UVCCameraState.closed);
        break;
      default:
        // Any string containing "ERROR" is treated as an error report and
        // also forwarded on the message stream.
        if (state.contains("ERROR")) {
          _cameraState = UVCCameraState.error;
          _cameraErrorMsg = state;
          cameraStateCallback?.call(UVCCameraState.error);
          if (!_messageStreamController.isClosed) {
            _messageStreamController.sink.add(state);
          }
        }
        break;
    }
  }

  /// Records the captured picture path and notifies the callback.
  void _takePictureSuccess(String? result) {
    if (result != null) {
      _takePicturePath = result;
      clickTakePictureButtonCallback?.call(result);
    }
  }

  /// Closes the camera device.
  void closeCamera() {
    _cameraChannel?.invokeMethod('closeCamera');
  }
}
