import 'dart:async';
import 'dart:io';
import 'dart:math';
import 'package:flutter/material.dart';

import 'package:tuple/tuple.dart';

import 'impl/convert_helper.dart';
import 'impl/global_helper.dart';
import 'text_box/text/config.dart';
import 'text_box/text_box_config_controller.dart';
import 'types/types.dart';
import 'impl/ffmpeg_manager.dart';
import 'impl/resource_manager.dart';
import 'impl/auto_edit_helper.dart';
import 'impl/ml_kit_helper.dart';
import 'impl/text_widget.dart';
import 'impl/ffmpeg_helper.dart';

import 'text_box/text_box_builder.dart';

import 'extensions/extensions.dart';

/// Fraction of total reported progress attributed to exporting title/text
/// frame sequences; the remaining 2/3 covers the FFmpeg encode phase.
const double _titleExportPercentage = 1 / 3.0;

/// Off-screen widget that drives the SDK's video-generation pipeline.
///
/// It rasterizes title/text overlays through a hidden [ZZTextWidget], then
/// renders each clip, applies xfade transitions, an optional auto-edit
/// fade-out, and background music via FFmpeg, reporting progress through
/// a caller-supplied callback.
///
/// NOTE(review): this extends [StatelessWidget] yet carries mutable,
/// non-final fields (flags, timers, frame counters), which violates the
/// StatelessWidget immutability contract (`must_be_immutable` lint).
/// Consider moving this state into a controller or a StatefulWidget.
class VMSDKWidget extends StatelessWidget {
  VMSDKWidget({Key? key}) : super(key: key);

  // Hidden text renderer used to export title/text frame sequences.
  final ZZTextWidget _textWidget = ZZTextWidget();

  final TextBoxConfigController _textBoxConfigController =
      TextBoxConfigController("label", padding: const EdgeInsets.all(10));
  final CanvasTextConfig _textConfig = CanvasTextConfig(text: "");

  // Set once [initialize] completes.
  bool _isInitialized = false;
  final FFMpegManager _ffmpegManager = FFMpegManager();

  // Periodic timer that pushes encode-progress updates to the caller.
  Timer? _currentTimer;
  EGenerateStatus _currentStatus = EGenerateStatus.encoding;
  // Frame accounting for progress estimation:
  // frames already credited from completed FFmpeg runs.
  int _currentRenderedFrame = 0;
  // Monotonic high-water mark so reported progress never moves backwards.
  int _maxRenderedFrame = 0;
  // Frames reported so far by the currently running FFmpeg statistics callback.
  int _currentRenderedFrameInCallback = 0;
  // Estimated total output frames across all clips and xfade transitions.
  int _allFrame = 0;

  /// Whether [initialize] has been called and completed.
  bool get isInitialized {
    return _isInitialized;
  }

  /// Loads the resource map and label map; must complete before generating
  /// a video.
  Future<void> initialize() async {
    await ResourceManager.getInstance().loadResourceMap();
    await loadLabelMap();
    _isInitialized = true;
  }

  /// Runs ML Kit detection on [data] and returns the extracted result.
  ///
  /// Returns `null` on any failure — errors are intentionally swallowed.
  Future<String?> extractMLKitDetectData(MediaData data) async {
    try {
      return await extractData(data);
    } //
    catch (e) {
      return null;
    }
  }

  /// Loads [textData] into the hidden text widget and returns the preview
  /// result (path and export data) produced by [ZZTextWidget.loadText].
  Future<(String?, TextExportData?)> extractPreview(
      EditedTextData textData) async {
    return _textWidget.loadText(textData.id,
        initTexts: textData.texts.values.toList());
  }

  /// Decodes [encodedJSON] into [AllEditedData] and delegates to
  /// [generateVideoFromObject]; see that method for progress semantics.
  Future<VideoGeneratedResult> generateVideoFromJSON(String encodedJSON,
      Function(EGenerateStatus status, double progress)? progressCallback,
      {String? language}) async {
    AllEditedData allEditedData = parseJSONToAllEditedData(encodedJSON);

    return generateVideoFromObject(allEditedData, progressCallback,
        language: language);
  }

  /// Generates the final video from [allEditedData].
  ///
  /// Phase 1 exports every text overlay as a frame sequence and occupies
  /// the first [_titleExportPercentage] of reported progress; phase 2
  /// ([_runFFmpeg]) renders clips/transitions/music and occupies the rest.
  /// [progressCallback], when non-null, receives values in `[0, 1]`.
  Future<VideoGeneratedResult> generateVideoFromObject(
      AllEditedData allEditedData,
      Function(EGenerateStatus status, double progress)? progressCallback,
      {String? language}) async {
    // Collect every edited text across all media so progress can be split
    // evenly among them.
    List<EditedTextData> texts = [];
    for (final EditedMedia editedMedia in allEditedData.editedMediaList) {
      if (editedMedia.editedTexts.isNotEmpty) {
        texts.addAll(editedMedia.editedTexts);
      }
    }

    double totalProgress = 0;
    for (final EditedTextData editedText in texts) {
      await _textWidget.loadText(editedText.id,
          initTexts: editedText.texts.values.toList(),
          language: language,
          isExtractPreviewImmediate: false);
      // Export all frames for this text; per-text progress is scaled so all
      // texts together fill the title-export share of total progress.
      await _textWidget.extractAllSequence((progress) {
        if (progressCallback != null) {
          progressCallback(
              _currentStatus,
              (totalProgress + (progress / texts.length)) *
                  _titleExportPercentage);
        }
      });
      totalProgress = min(totalProgress + (1.0 / texts.length), 1.0);

      // Snapshot the widget's export output for the FFmpeg phase.
      editedText.textExportData = TextExportData(
          editedText.id,
          _textWidget.width,
          _textWidget.height,
          _textWidget.frameRate,
          _textWidget.totalFrameCount,
          _textWidget.allSequencesPath!,
          _textWidget.textDataMap);
    }

    final VideoGeneratedResult result = await _runFFmpeg(
        allEditedData.editedMediaList,
        allEditedData.musicList,
        allEditedData.ratio,
        progressCallback);
    return result;
  }

  /// Returns only the media whose backing files exist on disk.
  ///
  /// NOTE(review): the name suggests the opposite of what it does — it
  /// filters OUT the media that do not exist and keeps the rest.
  Future<List<MediaData>> _filterNotExistsMedia(
      List<MediaData> mediaList) async {
    List<MediaData> result = [];

    for (final media in mediaList) {
      final File file = File(media.absolutePath);
      final bool isExists = await file.exists();

      if (isExists) {
        result.add(media);
      }
    }

    return result;
  }

  // Number of thumbnail extractions currently in flight; used to cap
  // concurrency below.
  int _currentThumbnailExtractCount = 0;

  /// Extracts a thumbnail for [editedMedia] and stores its path on the
  /// object, optionally setting the output [ratio] first.
  ///
  /// At most five extractions run concurrently; additional callers
  /// busy-wait (polling every 100 ms) until a slot frees up.
  /// Extraction errors are logged and swallowed.
  Future<void> extractAndMapThumbnail(EditedMedia editedMedia,
      {ERatio? ratio}) async {
    if (ratio != null) {
      setRatio(ratio);
    }
    while (_currentThumbnailExtractCount >= 5) {
      await Future.delayed(const Duration(milliseconds: 100));
    }
    _currentThumbnailExtractCount++;
    try {
      editedMedia.thumbnailPath = await extractThumbnail(editedMedia);
    } catch (e) {
      print(e);
    }
    _currentThumbnailExtractCount--;
  }

  /// Core FFmpeg pipeline: renders each clip, merges xfade transitions,
  /// applies an optional auto-edit fade-out and the music track, and
  /// returns the final result.
  ///
  /// When [isRunFFmpeg] is false, no rendering happens and a result with
  /// an empty output path is returned (spot info only).
  /// Progress is reported from a 250 ms periodic timer using the frame
  /// counters updated by FFmpeg statistics callbacks.
  /// On error the progress timer is cancelled and the error is rethrown.
  Future<VideoGeneratedResult> _runFFmpeg(
      List<EditedMedia> editedMediaList,
      List<MusicData> musicList,
      ERatio ratio,
      Function(EGenerateStatus status, double progress)? progressCallback,
      {isAutoEdit = false,
      isRunFFmpeg = true}) async {
    try {
      final List<SpotInfo> spotInfoList = [];
      final List<String?> thumbnailList = [];

      setRatio(ratio);

      // Build spot info: the start time and GPS string of each clip.
      double currentDuration = 0;
      for (int i = 0; i < editedMediaList.length; i++) {
        spotInfoList.add(
            SpotInfo(currentDuration, editedMediaList[i].mediaData.gpsString));
        currentDuration += editedMediaList[i].duration;
      }

      if (!isRunFFmpeg) {
        // List<Future> extractThumbnailFutures = [];
        // for (int i = 0; i < editedMediaList.length; i++) {
        //   extractThumbnailFutures.add(_extractAndMapThumbnail(editedMediaList[i]));
        // }
        // await Future.wait(extractThumbnailFutures);

        // for (int i = 0; i < editedMediaList.length; i++) {
        //   thumbnailList.add(editedMediaList[i].thumbnailPath!);
        // }

        final VideoGeneratedResult result =
            VideoGeneratedResult("", spotInfoList, thumbnailList);

        result.editedMediaList.addAll(editedMediaList);
        result.musicList.addAll(musicList);

        return result;
      }

      await ResourceManager.getInstance()
          .loadResourceFromAssets(editedMediaList, ratio);

      // Reset progress-tracking state for this run.
      _currentStatus = EGenerateStatus.encoding;
      _currentRenderedFrame = 0;
      _maxRenderedFrame = 0;
      _currentRenderedFrameInCallback = 0;
      _allFrame = 0;

      // Estimate the total frame count: one pass per clip, plus an extra
      // pass for each xfade transition (which re-encodes the joined pair).
      int videoFramerate = getFramerate();
      for (int i = 0; i < editedMediaList.length; i++) {
        final EditedMedia editedMedia = editedMediaList[i];
        double duration =
            normalizeTime(editedMedia.duration + editedMedia.xfadeDuration);
        _allFrame += (duration * videoFramerate).floor();

        if (i < editedMediaList.length - 1) {
          TransitionData? transition = editedMedia.transition;
          if (transition != null && transition.type == ETransitionType.xfade) {
            final EditedMedia nextMedia = editedMediaList[i + 1];
            double duration = normalizeTime(editedMedia.duration +
                nextMedia.duration -
                editedMedia.xfadeDuration -
                0.01);
            _allFrame += (duration * videoFramerate).floor();
          }
        }
      }

      if (_currentTimer != null) {
        _currentTimer!.cancel();
      }

      // Report progress every 250 ms from the shared frame counters.
      // NOTE(review): the `_currentTimer = timer;` assignment inside the
      // callback is redundant — Timer.periodic already returned this timer.
      _currentTimer =
          Timer.periodic(const Duration(milliseconds: 250), (timer) {
        _currentTimer = timer;
        if (progressCallback != null) {
          if (_currentRenderedFrame + _currentRenderedFrameInCallback >
              _maxRenderedFrame) {
            _maxRenderedFrame =
                _currentRenderedFrame + _currentRenderedFrameInCallback;
          }

          // Map frame progress onto the post-title share of total progress;
          // clamp below 1.0 so completion is only reported explicitly.
          progressCallback(
              _currentStatus,
              min(
                  1.0,
                  _titleExportPercentage +
                      (_maxRenderedFrame / _allFrame) *
                          (1 - _titleExportPercentage) -
                      0.01));
        }
      });

      DateTime now = DateTime.now();

      final List<RenderedData> clipDataList = [];
      double totalDuration = 0;

      // Stage 1: render every clip individually, extracting a thumbnail
      // for each as we go.
      for (int i = 0; i < editedMediaList.length; i++) {
        final EditedMedia editedMedia = editedMediaList[i];

        TransitionData? prevTransition, nextTransition;
        if (i > 0) {
          prevTransition = editedMediaList[i - 1].transition;
        }
        if (i < editedMediaList.length - 1) {
          nextTransition = editedMediaList[i].transition;
        }

        final RenderedData clipData = await clipRender(
            editedMedia,
            i,
            prevTransition,
            nextTransition,
            (statistics) => _currentRenderedFrameInCallback =
                statistics.getVideoFrameNumber(),
            isOnlyOneClip: editedMediaList.length == 1);

        // Fold the finished run's frames into the committed counter.
        _currentRenderedFrameInCallback = 0;

        double duration =
            normalizeTime(editedMedia.duration + editedMedia.xfadeDuration);
        _currentRenderedFrame += (duration * videoFramerate).floor();

        clipDataList.add(clipData);

        String? thumbnailPath = await extractThumbnail(editedMediaList[i]);
        editedMedia.thumbnailPath = thumbnailPath;
        thumbnailList.add(thumbnailPath);

        totalDuration += editedMedia.duration;
      }

      // Stage 2: merge adjacent clips joined by an xfade transition.
      // A merged result is written back into clipDataList[i + 1] so the
      // next iteration either chains another xfade onto it or adds it to
      // the final list; clips without an xfade are added directly.
      RenderedData curRendered;
      final List<RenderedData> xfadeAppliedList = [];
      for (int i = 0; i < clipDataList.length; i++) {
        curRendered = clipDataList[i];
        final EditedMedia editedMedia = editedMediaList[i];
        TransitionData? xfadeTransition = editedMediaList[i].transition;

        if (i < editedMediaList.length - 1 &&
            editedMedia.xfadeDuration > 0 &&
            xfadeTransition != null &&
            xfadeTransition.type == ETransitionType.xfade) {
          final RenderedData nextRendered = clipDataList[i + 1];

          curRendered = await applyXFadeTransitions(
              curRendered,
              nextRendered,
              i,
              (xfadeTransition as XFadeTransitionData).filterName,
              editedMedia.xfadeDuration,
              (statistics) => _currentRenderedFrameInCallback =
                  statistics.getVideoFrameNumber());

          _currentRenderedFrameInCallback = 0;
          double duration = normalizeTime(curRendered.duration +
              nextRendered.duration -
              editedMedia.xfadeDuration -
              0.01);
          _currentRenderedFrame += (duration * videoFramerate).floor();
          clipDataList[i + 1] = curRendered;
        } else {
          xfadeAppliedList.add(curRendered);
        }
      }

      // Stage 3 (auto-edit only, with >1 clip and >=10 s total): pop clips
      // from the tail until at least 2 s is accumulated, apply a fade-out
      // to that tail, and push the faded result back.
      if (isAutoEdit && editedMediaList.length > 1 && totalDuration >= 10) {
        double curDuration = 0;
        List<RenderedData> fadeOutClips = [];
        for (int i = xfadeAppliedList.length - 1; i >= 0; i--) {
          RenderedData lastClip = xfadeAppliedList.removeLast();
          fadeOutClips.add(lastClip);

          curDuration += lastClip.duration;
          if (curDuration >= 2) {
            final RenderedData fadeOutApplied =
                await applyFadeOut(fadeOutClips.reversed.toList());

            xfadeAppliedList.add(fadeOutApplied);
            break;
          }
        }
      }

      // Stage 4: loop the music playlist until it covers the full video
      // duration.
      List<MusicData> regeneratedMusicList = [];
      if (musicList.isNotEmpty) {
        int currentMusicIndex = 0;
        double remainTotalDuration = totalDuration;

        while (remainTotalDuration > 0) {
          MusicData musicData = musicList[currentMusicIndex % musicList.length];
          regeneratedMusicList.add(musicData);

          remainTotalDuration -= musicData.duration;
          currentMusicIndex++;
        }
      }

      // Stage 5: concatenate everything and mux in the music track.
      final RenderedData mergedClip = await mergeAllClips(xfadeAppliedList);
      final RenderedData resultClip =
          await applyMusics(mergedClip, regeneratedMusicList);

      print(
          "elapsed time for rendering : ${DateTime.now().difference(now).inMilliseconds / 1000}s");
      _currentStatus = EGenerateStatus.finishing;
      _currentRenderedFrame = _allFrame;
      File resultFile = File(resultClip.absolutePath);
      if (await resultFile.exists()) {
        double fileSizeInMegaBytes =
            ((await resultFile.length()) * 1.0) / 1024 / 1024;
        print("resultFile : ${(fileSizeInMegaBytes * 100).floor() / 100}MB");
      }

      // Stop the progress timer and report explicit completion.
      if (_currentTimer != null) {
        _currentTimer!.cancel();
      }
      _currentTimer = null;

      if (progressCallback != null) {
        progressCallback(_currentStatus, 1);
      }

      final VideoGeneratedResult result = VideoGeneratedResult(
          resultClip.absolutePath, spotInfoList, thumbnailList);

      result.editedMediaList.addAll(editedMediaList);
      result.musicList.addAll(musicList);

      return result;
    } catch (e) {
      // Ensure the progress timer never outlives a failed run.
      if (_currentTimer != null) {
        _currentTimer!.cancel();
      }
      _currentTimer = null;

      rethrow;
    }
  }

  /// Cancels the in-flight FFmpeg generation, ignoring any errors.
  ///
  /// NOTE(review): `void` + `async` means callers cannot await the
  /// cancellation; consider returning `Future<void>`.
  void cancelGenerate() async {
    try {
      await _ffmpegManager.cancel();
    } catch (e) {}
  }

  /// Releases held resources (currently a no-op).
  void release() {}

  /// Builds the working widgets translated far off-screen so they render
  /// (and can be rasterized) without being visible to the user.
  @override
  Widget build(BuildContext context) {
    return Transform.translate(
        offset: const Offset(-9999999, -99999),
        child: Stack(children: [
          _textWidget,
          TextBoxBuilder(
              controller: _textBoxConfigController, config: _textConfig)
        ]));
  }
}

/// Adds type-aware preparation behavior to [EditedMedia].
extension EditedMediaHandler on EditedMedia {
  /// Prepares this media for rendering.
  ///
  /// Image media is converted into a video segment; any other media type
  /// gets its thumbnail extracted and stored on [thumbnailPath].
  Future handle() async {
    if (mediaData.type == EMediaType.image) {
      // Still images must first be rendered out as a video clip.
      await extractVideoFromImageMedia(this);
      return;
    }
    // Non-image (video) media: capture a thumbnail when available.
    thumbnailPath = await extractThumbnailAvailable(this);
  }
}
