import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:finance_app/services/edge_detection/edge_detection_shape/edge_detection_draw.dart';
import 'package:finance_app/services/edge_detection/isolate_utils.dart';
import 'package:finance_app/services/mlv_service.dart';
import 'package:flutter/material.dart';
import 'package:camera/camera.dart';
import 'package:my_edge_detection/edge_detection.dart';
import 'dart:ui' as ui;

/// Full-screen camera preview with an optional on-device receipt
/// classification and edge-detection overlay drawn on top of the live feed.
class NormalCamera extends StatefulWidget {
  /// Flash mode applied to the camera; changing it on a rebuilt widget
  /// is picked up in [NormalCameraState.didUpdateWidget].
  final FlashMode flashMode;

  /// Whether the live image stream (classification + edge detection) runs.
  final bool activeOCR;

  // NOTE: `=` replaces the deprecated `:` default-value separator.
  const NormalCamera({Key key, this.flashMode, this.activeOCR = true})
      : super(key: key);

  @override
  NormalCameraState createState() => NormalCameraState();
}

/// State for [NormalCamera]: owns the [CameraController], the gesture
/// handling (pinch-zoom, tap-to-focus), and the receipt-detection overlay.
class NormalCameraState extends State<NormalCamera>
    with WidgetsBindingObserver {
  /// Cameras discovered on the device; stays empty if discovery fails.
  List<CameraDescription> cameras = [];
  CameraController controller;

  /// Keyed on the preview container so its rendered size can be measured
  /// when mapping edge-detection coordinates onto the screen.
  GlobalKey cameraKey = GlobalKey();
  double _minAvailableExposureOffset = 0.0;
  double _maxAvailableExposureOffset = 0.0;
  double _minAvailableZoom = 1.0;
  double _maxAvailableZoom = 1.0;

  /// Number of fingers currently on screen; pinch-zoom requires exactly 2.
  int _pointers = 0;
  double _currentScale = 1.0;
  double _baseScale = 1.0;
  FlashMode _flashMode;
  MLVService mlvService = MLVService();

  /// Latest detection result; null means "draw nothing".
  EdgeDetectionResult edgeDetectionResult;

  /// Decoded frames used to size the edge-detection overlay.
  StreamController<ui.Image> imageStream = StreamController<ui.Image>();
  String label = '';

  @override
  void initState() {
    super.initState();
    initCamera();
    mlvService.init();
    WidgetsBinding.instance.addObserver(this);
  }

  @override
  void dispose() {
    // Stop observing lifecycle events before tearing the state down
    // (previously this ran after super.dispose()).
    WidgetsBinding.instance.removeObserver(this);
    // Controller is still null if camera discovery never completed.
    controller?.dispose();
    mlvService.end();
    imageStream.close();
    super.dispose();
  }

  /// Discovers the device cameras and starts the first one.
  void initCamera() async {
    try {
      cameras = await availableCameras();
    } on CameraException catch (e) {
      // logError(e.code, e.description);
    }
    _flashMode = widget.flashMode;
    // Discovery can fail (exception above) or the device may simply have
    // no cameras; `cameras.first` would throw a StateError in either case.
    if (cameras.isEmpty) return;
    onNewCameraSelected(cameras.first);
  }

  /// Disposes any previous controller, initializes a new one for
  /// [cameraDescription], reads its zoom/exposure ranges, and starts the
  /// detection stream.
  void onNewCameraSelected(CameraDescription cameraDescription) async {
    if (controller != null) {
      await controller.dispose();
    }
    final CameraController cameraController = CameraController(
      cameraDescription,
      ResolutionPreset.high,
      enableAudio: false,
      // imageFormatGroup: ImageFormatGroup.jpeg,
    );
    controller = cameraController;

    // If the controller is updated then update the UI.
    // cameraController.addListener(() {
    //   if (mounted) setState(() {});
    //   if (cameraController.value.hasError) {
    //     // showInSnackBar(
    //     //     'Camera error ${cameraController.value.errorDescription}');
    //   }
    // });

    try {
      await cameraController.initialize();

      // The four range queries are independent; fetch them concurrently.
      await Future.wait([
        cameraController
            .getMinExposureOffset()
            .then((value) => _minAvailableExposureOffset = value),
        cameraController
            .getMaxExposureOffset()
            .then((value) => _maxAvailableExposureOffset = value),
        cameraController
            .getMaxZoomLevel()
            .then((value) => _maxAvailableZoom = value),
        cameraController
            .getMinZoomLevel()
            .then((value) => _minAvailableZoom = value),
      ]);
      startStream();
    } on CameraException catch (e) {
      _showCameraException(e);
    }

    if (mounted) {
      setState(() {});
    }
  }

  void _showCameraException(CameraException e) {
    // logError(e.code, e.description);
    // showInSnackBar('Error: ${e.code}\n${e.description}');
  }

  /// Releases the camera; safe to call before initialization finished.
  void disposeCamera() async {
    if (controller != null) {
      await controller.dispose();
    }
  }

  @override
  Widget build(BuildContext context) {
    final CameraController cameraController = controller;
    if (cameraController == null || !cameraController.value.isInitialized) {
      return Container();
    } else {
      return Listener(
        onPointerDown: (_) => _pointers++,
        onPointerUp: (_) => _pointers--,
        child: Stack(
          children: [
            Container(
              key: cameraKey,
              child: CameraPreview(
                controller,
                child: LayoutBuilder(builder:
                    (BuildContext context, BoxConstraints constraints) {
                  return GestureDetector(
                    behavior: HitTestBehavior.opaque,
                    onScaleStart: _handleScaleStart,
                    onScaleUpdate: _handleScaleUpdate,
                    onTapDown: (details) =>
                        onViewFinderTap(details, constraints),
                  );
                }),
              ),
            ),
            // Redraws the detected-edge outline whenever a new frame from
            // the detector is decoded.
            StreamBuilder<ui.Image>(
                stream: imageStream.stream,
                builder:
                    (BuildContext context, AsyncSnapshot<ui.Image> snapshot) {
                  return getEdgePaint(snapshot, context);
                }),
            if (label != "")
              Align(
                  alignment: Alignment.center,
                  child: Container(
                      padding: EdgeInsets.all(8),
                      decoration: BoxDecoration(
                          color: label == "Receipt Detected"
                              ? Colors.green.withOpacity(0.6)
                              : Colors.red.withOpacity(0.6),
                          borderRadius: BorderRadius.circular(6)),
                      child: Text(
                        label,
                        style: TextStyle(color: Colors.white),
                      ))),
          ],
        ),
      );
    }
  }

  /// Builds the edge overlay, scaling the detector's image coordinates to
  /// the on-screen preview size; returns an empty widget when there is
  /// nothing to draw yet.
  Widget getEdgePaint(
      AsyncSnapshot<ui.Image> imageSnapshot, BuildContext context) {
    if (imageSnapshot.connectionState == ConnectionState.waiting)
      return Container();

    if (imageSnapshot.hasError) return Text('Error: ${imageSnapshot.error}');

    if (edgeDetectionResult == null) return Container();

    final keyContext = cameraKey.currentContext;

    if (keyContext == null) {
      return Container();
    }

    final box = keyContext.findRenderObject() as RenderBox;

    return EdgeDetectionDraw(
      originalImageSize: Size(imageSnapshot.data.width.toDouble(),
          imageSnapshot.data.height.toDouble()),
      renderedImageSize: Size(box.size.width, box.size.height),
      edgeDetectionResult: edgeDetectionResult,
    );
  }

  void _handleScaleStart(ScaleStartDetails details) {
    _baseScale = _currentScale;
  }

  Future<void> _handleScaleUpdate(ScaleUpdateDetails details) async {
    // When there are not exactly two fingers on screen don't scale
    if (controller == null || _pointers != 2) {
      return;
    }

    _currentScale = (_baseScale * details.scale)
        .clamp(_minAvailableZoom, _maxAvailableZoom);

    await controller.setZoomLevel(_currentScale);
  }

  /// Sets focus and exposure to the tapped point (normalized 0..1 coords).
  void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
    if (controller == null) {
      return;
    }

    final CameraController cameraController = controller;

    final offset = Offset(
      details.localPosition.dx / constraints.maxWidth,
      details.localPosition.dy / constraints.maxHeight,
    );
    cameraController.setExposurePoint(offset);
    cameraController.setFocusPoint(offset);
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    final CameraController cameraController = controller;

    // App state changed before we got the chance to initialize.
    if (cameraController == null || !cameraController.value.isInitialized) {
      return;
    }
    if (state == AppLifecycleState.inactive) {
      // Release the camera while backgrounded; re-acquired on resume below.
      cameraController.dispose();
    } else if (state == AppLifecycleState.resumed) {
      onNewCameraSelected(cameraController.description);
    }
  }

  @override
  void didUpdateWidget(covariant NormalCamera oldWidget) {
    if (_flashMode != widget.flashMode) {
      _flashMode = widget.flashMode;
      setFlashMode(_flashMode);
    }
    super.didUpdateWidget(oldWidget);
  }

  Future<void> setFlashMode(FlashMode mode) async {
    if (controller == null) {
      return;
    }
    try {
      await controller.setFlashMode(mode);
    } on CameraException catch (e) {
      _showCameraException(e);
      rethrow;
    }
  }

  /// Starts the live frame stream: each frame is classified and, when a
  /// receipt is detected, its edges are published for the overlay.
  Future<void> startStream() async {
    if (!widget.activeOCR) return;
    await controller.startImageStream((image) async {
      CategoryWithEdge categoryWithEdge =
          await mlvService.onClassifiedResult(image);
      if (categoryWithEdge != null) {
        loadUiImage();
        if (categoryWithEdge.category.label == "Receipt Detected") {
          edgeDetectionResult = categoryWithEdge.edgeDetectionResult;
          if (!edgeDetectionResult.hasResult()) {
            //Do not draw if edge detection has not result
            edgeDetectionResult = null;
          }
        } else {
          edgeDetectionResult = null;
        }
        if (label != categoryWithEdge.category.label) {
          label = categoryWithEdge.category.label;
          print("label $label");
          if (mounted) {
            setState(() {});
          }
        }
      }
    });
  }

  /// Decodes the detector's snapshot file and publishes it on [imageStream]
  /// so [getEdgePaint] knows the original image dimensions.
  Future<void> loadUiImage() async {
    if (edgeDetectionResult == null) return null;
    final Uint8List data =
        await File(edgeDetectionResult.imagePath).readAsBytes();
    // `data` is already a Uint8List; no need to re-wrap its buffer.
    ui.decodeImageFromList(data, (ui.Image image) {
      // The decode callback is async and can fire after dispose() has
      // closed the stream; adding then would throw.
      if (!imageStream.isClosed) {
        imageStream.add(image);
      }
    });
  }

  Future<void> stopStreaming() async {
    if (!widget.activeOCR) return;
    // Nothing to stop if the camera never initialized.
    if (controller == null) return;
    await controller.stopImageStream();
  }

  /// Captures a still photo; returns null when the camera is unavailable,
  /// a capture is already in flight, or the capture fails.
  Future<XFile> takePicture() async {
    final CameraController cameraController = controller;
    if (cameraController == null || !cameraController.value.isInitialized) {
      // showInSnackBar('Error: select a camera first.');
      return null;
    }
    if (cameraController.value.isTakingPicture) {
      // A capture is already pending, do nothing.
      return null;
    }
    try {
      XFile file = await cameraController.takePicture();
      return file;
    } on CameraException catch (e) {
      _showCameraException(e);
      return null;
    }
  }
}
