import 'dart:io';
import 'dart:math';
import 'package:ffmpeg_kit_flutter_full_gpl/ffmpeg_kit.dart';
import 'package:ffmpeg_kit_flutter_full_gpl/ffmpeg_kit_config.dart';
import 'package:ffmpeg_kit_flutter_full_gpl/ffprobe_kit.dart';
import 'package:ffmpeg_kit_flutter_full_gpl/media_information.dart';
import 'package:ffmpeg_kit_flutter_full_gpl/return_code.dart';
import 'package:ffmpeg_kit_flutter_full_gpl/statistics.dart';
import 'package:ffmpeg_kit_flutter_full_gpl/stream_information.dart';
import 'package:path/path.dart' as p;
import 'package:uuid/uuid.dart';
import 'package:zz_video_maker/src/impl/resource_fetch_helper.dart';
import 'package:zz_video_maker/zz_video_maker.dart';

import '../types/types.dart';
import 'global_helper.dart';
import 'ffmpeg_manager.dart';

// Module-level render state, configured via setRatio() before rendering.
Resolution _resolution = Resolution(0, 0); // target output resolution
int _scaledVideoWidth = 0; // even-numbered width used by the final downscale
int _scaledVideoHeight = 0; // even-numbered height used by the final downscale
int _framerate = 30; // output frame rate (fps)
ERatio _ratio = ERatio.ratio11; // current aspect-ratio preset

double _scaleFactor = 2 / 3.0; // final output is scaled to 2/3 of _resolution
double _minDurationFactor = 1 / _framerate; // duration of one frame, seconds
const int _fadeDuration = 3; // audio fade-out length in seconds

/// A rendered intermediate or final media file on disk.
class RenderedData {
  // Absolute filesystem path of the rendered file (.mp4 / .m4a).
  String absolutePath;
  // Duration of the rendered media, in seconds.
  double duration;

  RenderedData(this.absolutePath, this.duration);
}

// Shared FFmpeg execution helper used by every render step in this library.
final FFMpegManager _ffmpegManager = FFMpegManager();

/// Returns the ffmpeg transpose filter fragment for [orientation].
///
/// Rotation handling is currently disabled on purpose: the original mapping
/// is preserved below for reference, and an empty string is always returned.
String _getTransposeFilter(int orientation) {
  // Disabled mapping, kept for future re-enablement:
  //   90  -> "transpose=1,"
  //   180 -> "transpose=2,transpose=2,"
  //   270 -> "transpose=2,"
  return "";
}

/// Rounds [num] down to the nearest even integer.
///
/// FFmpeg's yuv420p pixel format requires even frame dimensions, so every
/// computed width/height is passed through this helper.
int _getEvenNumber(int num) => num.isEven ? num : num - 1;

/// Updates the module-level render ratio and derives the working resolution.
///
/// Also recomputes the scaled (2/3-sized, even-numbered) video dimensions
/// used by the final downscale step in [clipRender].
void setRatio(ERatio ratio) {
  _ratio = ratio;

  final Resolution resolution = Resolution.fromRatio(ratio);
  _resolution = resolution;

  _scaledVideoWidth =
      _getEvenNumber((resolution.width * _scaleFactor).floor());
  _scaledVideoHeight =
      _getEvenNumber((resolution.height * _scaleFactor).floor());
}

// Extensions recognized as image media.
const Set<String> _imageExtensions = {'.jpg', '.jpeg', '.png', '.gif'};
// Extensions recognized as video media.
const Set<String> _videoExtensions = {'.mp4', '.mov', '.avi', '.mkv'};

/// Classifies [filePath] by its file extension (case-insensitive).
///
/// Returns `true` for image types, `false` for video types, and `null`
/// when the extension is not recognized.
bool? _isImage(String filePath) {
  final String extension = p.extension(filePath).toLowerCase();
  if (_imageExtensions.contains(extension)) return true; // image type
  if (_videoExtensions.contains(extension)) return false; // video type
  return null;
}

/// Probes [path] with ffprobe and builds a [MediaData] describing it.
///
/// Throws an [Exception] when the extension is neither a known image nor a
/// known video type. Returns `null` when ffprobe cannot read the file.
Future<MediaData?> fileInfo(String path) async {
  final bool? isImage = _isImage(path);
  if (isImage == null) {
    throw Exception('Path不正确');
  }
  final bool isVideo = !isImage;

  final mediaInfo =
      (await FFprobeKit.getMediaInformation(path)).getMediaInformation();
  if (mediaInfo == null) {
    return null;
  }

  final streams = mediaInfo.getStreams();
  final EMediaType type = isVideo ? EMediaType.video : EMediaType.image;

  // Creation time lives in the container-level tags, when present.
  final format = mediaInfo.getFormatProperty(MediaInformation.keyTags) as Map?;
  final creationTime = format?['creation_time'] as String?;

  // Prefer the rotation-aware size parsed from ffprobe's text output.
  final res = await videoRealWidthHeight(path);
  var width = res?.$1 ?? 0;
  var height = res?.$2 ?? 0;
  const orientation = 0;
  // Images get a fixed 5-second default duration. tryParse guards against
  // malformed duration strings instead of throwing a FormatException.
  final double duration =
      isVideo ? (double.tryParse(mediaInfo.getDuration() ?? '0') ?? 0) : 5;
  // Fall back to "now" when the tag is absent or not ISO-8601 parseable.
  final DateTime createDate =
      (creationTime != null ? DateTime.tryParse(creationTime) : null) ??
          DateTime.now();
  String? gpsString = '';
  String? mlkitDetected;
  final key = const Uuid().v4();

  // Fallback: take the dimensions reported by the stream metadata.
  if (width == 0) {
    for (final stream in streams) {
      width = stream.getWidth() ?? width;
      height = stream.getHeight() ?? height;
    }
  }

  return MediaData(key, path, type, width, height, orientation, duration,
      createDate, gpsString, mlkitDetected);
}

/// Parses the real (rotation-corrected) video dimensions of [videoPath].
///
/// Runs `ffprobe -i <path>` and scans the log output for the
/// `Video: ... WxH` stream line. If a `rotation of N degrees` entry
/// indicates a 90/270 rotation, width and height are swapped.
/// Returns `null` when the probe fails or no dimensions can be found.
Future<(int, int)?> videoRealWidthHeight(String videoPath) async {
  // Quote the path so ffmpeg-kit's command parser survives spaces in it.
  final String command = '-i "$videoPath"';

  // Use the session returned by execute() directly; querying
  // FFmpegKitConfig.getLastSession() could race with other sessions
  // started concurrently elsewhere in the app.
  final session = await FFprobeKit.execute(command);
  final returnCode = await session.getReturnCode();

  if (!ReturnCode.isSuccess(returnCode)) {
    return null;
  }

  final String? output = await session.getOutput();
  if (output == null) {
    return null;
  }

  RegExp regExp = RegExp(r'Video: .+ (\d+)x(\d+)');
  Match? match = regExp.firstMatch(output);
  int? width;
  int? height;
  if (match != null && match.groupCount == 2) {
    width = int.parse(match.group(1)!);
    height = int.parse(match.group(2)!);
  }

  if (width == null || height == null) {
    return null;
  }

  // Display-matrix rotation side data, e.g. "rotation of -90.00 degrees".
  regExp = RegExp(r'rotation of ([-\d.]+) degrees');
  match = regExp.firstMatch(output);
  if (match != null && match.groupCount == 1) {
    final int rotation = double.parse(match.group(1)!).toInt();
    // 90 / 270 (and their negatives) swap the visible width and height.
    if (rotation % 180 != 0) {
      final int temp = width;
      width = height;
      height = temp;
    }
  }

  return (width, height);
}

/// Renders one edited clip (image or video) to "clip<clipIdx>.mp4" in the
/// app directory, burning in frame, stickers, canvas texts, titles and
/// overlay transitions via a single ffmpeg -filter_complex graph.
///
/// [prevTransition]/[nextTransition] are only applied when they are overlay
/// transitions; xfade transitions are handled later by
/// [applyXFadeTransitions]. Returns the output path and the clip duration
/// (media duration plus xfade allowance).
Future<RenderedData> clipRender(
    EditedMedia editedMedia,
    int clipIdx,
    TransitionData? prevTransition,
    TransitionData? nextTransition,
    Function(Statistics)? ffmpegCallback,
    {isOnlyOneClip = false}) async {
  final MediaData mediaData = await scaleImageMedia(editedMedia.mediaData);
  final FrameData? frame = editedMedia.frame;
  final List<EditedStickerData> stickerList = editedMedia.stickers;
  final List<CanvasTextData> canvasTexts = editedMedia.canvasTexts;
  final List<EditedTextData> textList = editedMedia.editedTexts;
  final List<FlutterTextModel> textModelList = [];

  // Clip length includes the cross-fade allowance; both values are snapped
  // to the frame grid by normalizeTime.
  double duration =
      normalizeTime(editedMedia.duration + editedMedia.xfadeDuration);
  double startTime = normalizeTime(editedMedia.startTime);

  final List<String> arguments = <String>[];
  final String appDirPath = await getAppDirectoryPath();
  final String resourceDirPath = await getAppResourcePath();
  final String outputPath = "$appDirPath/clip$clipIdx.mp4";

  final List<String> inputArguments = <String>[]; // -i arguments
  final List<String> filterStrings = <String>[]; // -filter_complex strings

  int inputFileCount = 0;
  String trimFilter = "";
  String videoOutputMapVariable = "";
  String audioOutputMapVariable = "";

  /////////////////////////
  // INPUT IMAGE & VIDEO //
  /////////////////////////
  if (mediaData.type == EMediaType.image) {
    // Images are looped for the clip's duration at the output frame rate.
    inputArguments.addAll(["-framerate", "$_framerate", "-loop", "1"]);
    inputArguments.addAll([
      "-t",
      "$duration",
      "-i",
      mediaData.scaledPath ?? mediaData.absolutePath
    ]);

    // Images have no audio; the silent anullsrc input (index 1) is used.
    audioOutputMapVariable = "1:a";
  } //
  else {
    trimFilter = "trim=$startTime:${startTime + duration},setpts=PTS-STARTPTS,";
    inputArguments.addAll(["-i", mediaData.absolutePath]);

    // Probe the source to find out whether it carries an audio stream.
    final mediaInfo =
        (await FFprobeKit.getMediaInformation(mediaData.absolutePath))
            .getMediaInformation();
    final List<StreamInformation> streams =
        mediaInfo != null ? mediaInfo.getStreams() : [];

    bool isAudioExists = false;
    for (final stream in streams) {
      if (stream.getType() == "audio") {
        isAudioExists = true;
        break;
      }
    }

    if (isAudioExists) {
      // Trim the source audio to the clip window, mix it with the silent
      // track (to guarantee full-length audio), then apply the clip volume.
      filterStrings.add(
          "[0:a]atrim=$startTime:${startTime + duration},asetpts=PTS-STARTPTS[aud];[aud][1:a]amix=inputs=2[aud_mixed];[aud_mixed]atrim=0:$duration,asetpts=PTS-STARTPTS[aud_trim];[aud_trim]volume=${editedMedia.volume}[aud_volume_applied];");
      audioOutputMapVariable = "[aud_volume_applied]";
    } else {
      audioOutputMapVariable = "1:a";
    }
  }

  // [1:a]
  // Silent stereo source, always input index 1, used as audio fallback/mixer.
  inputArguments.addAll([
    "-f",
    "lavfi",
    "-t",
    duration.toString(),
    "-i",
    "anullsrc=channel_layout=stereo:sample_rate=44100"
  ]);
  inputFileCount++;

  // int cropLeft = max(0, (mediaData.width * editedMedia.cropLeft).floor());
  // int cropRight =
  //     min(mediaData.width, (mediaData.width * editedMedia.cropRight).floor());
  // int cropTop = max(0, (mediaData.height * editedMedia.cropTop).floor());
  // int cropBottom = min(
  //     mediaData.height, (mediaData.height * editedMedia.cropBottom).floor());

  // int cropWidth = cropRight - cropLeft;
  // int cropHeight = cropBottom - cropTop;
  var outputWidth = _resolution.width;
  var outputHeight = _resolution.height;
  var inputWidth = mediaData.width;
  var inputHeight = mediaData.height;
  // NOTE(review): ih/iw/w/h below are currently unused locals.
  var ih = inputHeight;
  var iw = inputWidth;
  var w = outputWidth;
  var h = outputHeight;

  // Aspect ratios of the output canvas vs. the source media.
  var radio = outputWidth / outputHeight;
  var realRadio = inputWidth / inputHeight;

  // Source is wider (relative to the canvas) -> letterbox top/bottom;
  // otherwise pillarbox left/right.
  bool isWidth = realRadio > radio;

  // Picks the wide-source (t) or tall-source (f) expression variant.
  String str(t, f) {
    return isWidth ? t : f;
    // return "'if(gt(iw/ih, $radio),$t,$f)'";
  }

  if (realRadio == radio) {
    // Aspect ratios match: a plain scale to the output canvas is enough.
    filterStrings.add(
        // ignore: prefer_interpolation_to_compose_strings
        "[0:v]fps=$_framerate,$trimFilter${_getTransposeFilter(mediaData.orientation)}" +
            "scale=$outputWidth:$outputHeight," +
            "setdar=dar=${outputWidth / outputHeight}" +
            // ",zoompan=z='min(zoom+0.01,1.5)':d=${duration * _framerate}" +
            "[vid];");
  } else {
    // Aspect mismatch: split into [a] a blurred, cropped-to-canvas-shape
    // background and [b] the fitted foreground, then overlay [b] centered
    // on [a].
    filterStrings.add(
        // ignore: prefer_interpolation_to_compose_strings
        "[0:v]fps=$_framerate,$trimFilter${_getTransposeFilter(mediaData.orientation)}" +
            "split[a][b];" +
            "[a]crop=${str('ih', 'iw')}:${str('ih', 'iw')}:${str('((iw-ih)/2)', '0')}:${str('0', '(ih-iw)/2')},scale=${str('iw/10', 'ih/10')}:-1,gblur=sigma=5,scale=$outputWidth:$outputHeight[1];" +
            "[b]scale=${str('$outputWidth:-1', '-1:$outputHeight')}" +
            // ",zoompan=z='zoom+0.01':d=${duration * _framerate}" +
            // ",zoompan=z='min(zoom+0.01,1.5)':d=${duration * _framerate}" +
// zoompan=z='zoom+0.01':d=100, crop=in_w:in_h:ow/2-(in_w/zoom)/2:oh/2-(in_h/zoom)/2
            "[2];" +
            "[1][2]overlay=${str('0', '(W-w)/2')}:${str('(H-h)/2', '0')}" +
            "[vid];");
  }
  // ffmpeg -i input.mp4 -vf "[0:v]fps=30,
  //split[a][b];
  //[a]crop=ih:ih:((iw-ih)/2):0,scale=iw/10:-1,zoompan=z='min(zoom+0.01,1.5)':d=50,setsar=1[g];
  //[b]scale=1080:-1[bg];
  //[g][bg]overlay=0:(H-h)/2[vid]" -c:a copy output.mp4

  // ffmpeg -i input.mp4 -vf "crop=iw:ih:(iw-ih)/2:0,scale=1080:1080,
  //split[a][b];
  //[a]gblur=sigma=5[1];
  //[b]scale=1080:(1080*ih/iw)[2];
  //[1][2]overlay=0:(H-h)/2" output.mp4
  // NOTE(review): debug print left in production path.
  print('filterStrings.last ${filterStrings.last}');
  videoOutputMapVariable = "[vid]";
  inputFileCount++;

  ///////////////
  // ADD FRAME //
  ///////////////

  if (frame != null) {
    ResourceFileInfo fileInfo = frame.fileMap[_ratio]!;

    // Loop the frame asset enough times to cover the whole clip.
    final int loopCount = (duration / fileInfo.duration).floor();
    const String frameMapVariable = "[frame]";
    const String frameMergedMapVariable = "[frame_merged]";

    inputArguments.addAll([
      "-stream_loop",
      loopCount.toString(),
      "-c:v",
      "libvpx-vp9",
      "-i",
      "$resourceDirPath/${fileInfo.source.name}"
    ]);

    filterStrings.add(
        "[${inputFileCount++}:v]trim=0:$duration,setpts=PTS-STARTPTS,scale=${_resolution.width}:${_resolution.height},setdar=dar=${_resolution.width / _resolution.height}$frameMapVariable;");
    filterStrings.add(
        "$videoOutputMapVariable${frameMapVariable}overlay$frameMergedMapVariable;");

    videoOutputMapVariable = frameMergedMapVariable;
  }

  /////////////////
  // ADD STICKER //
  /////////////////

  for (int i = 0; i < stickerList.length; i++) {
    final EditedStickerData sticker = stickerList[i];
    ResourceFileInfo fileInfo = sticker.fileInfo!;

    final int loopCount = (duration / fileInfo.duration).floor();
    final String stickerMapVariable = "[sticker$i]";
    final String stickerScaledMapVariable = "[sticker_scaled$i]";
    final String stickerRotatedMapVariable = "[sticker_rotated$i]";
    final String stickerMergedMapVariable = "[sticker_merged$i]";

    // Normalize negative angles into [0, 2*pi).
    double rotate = sticker.rotate;
    if (rotate < 0) rotate = pi + (pi + rotate);

    // Fold the angle into [0, pi/2] for the bounding-box offset math below.
    double rotateForCal = rotate;
    if (rotateForCal > pi) rotateForCal -= pi;
    if (rotateForCal > pi / 2) {
      rotateForCal = (pi / 2) - (rotateForCal - (pi / 2));
    }

    inputArguments.addAll([
      "-stream_loop",
      loopCount.toString(),
      "-c:v",
      "libvpx-vp9",
      "-i",
      "$resourceDirPath/${fileInfo.source.name}"
    ]);

    filterStrings.add(
        "[${inputFileCount++}:v]trim=0:$duration,setpts=PTS-STARTPTS$stickerMapVariable;");
    filterStrings.add(
        "${stickerMapVariable}scale=${sticker.width}:${sticker.height}$stickerScaledMapVariable;");
    filterStrings.add(
        "${stickerScaledMapVariable}rotate=$rotate:c=none:ow=rotw($rotate):oh=roth($rotate)$stickerRotatedMapVariable;");
    // Offset compensates for the enlarged bounding box produced by rotate.
    filterStrings.add(
        "$videoOutputMapVariable${stickerRotatedMapVariable}overlay=${sticker.x}-(((${sticker.width}*cos($rotateForCal)+${sticker.height}*sin($rotateForCal))-${sticker.width})/2):${sticker.y}-(((${sticker.width}*sin($rotateForCal)+${sticker.height}*cos($rotateForCal))-${sticker.height})/2)$stickerMergedMapVariable;");

    videoOutputMapVariable = stickerMergedMapVariable;
  }

  /////////////////////
  // ADD CANVAS TEXT //
  /////////////////////

  for (int i = 0; i < canvasTexts.length; i++) {
    final CanvasTextData canvasText = canvasTexts[i];

    final String canvasTextScaledMapVariable = "[canvas_text_scaled$i]";
    final String canvasTextRotatedMapVariable = "[canvas_text_rotated$i]";
    final String canvasTextMergedMapVariable = "[canvas_text_merged$i]";

    // Same angle normalization / bounding-box folding as for stickers.
    double rotate = canvasText.rotate;
    if (rotate < 0) rotate = pi + (pi + rotate);

    double rotateForCal = rotate;
    if (rotateForCal > pi) rotateForCal -= pi;
    if (rotateForCal > pi / 2) {
      rotateForCal = (pi / 2) - (rotateForCal - (pi / 2));
    }

    inputArguments.addAll(["-i", canvasText.imagePath]);

    String overlayTimeFilter = "";
    if (isOnlyOneClip) {
      // overlayTimeFilter = "enable='between(t\\,0,${min(5, editedMedia.duration)})':";
    }

    filterStrings.add(
        "[${inputFileCount++}:v]scale=${canvasText.width}:-1$canvasTextScaledMapVariable;");
    filterStrings.add(
        "${canvasTextScaledMapVariable}rotate=$rotate:c=none:ow=rotw($rotate):oh=roth($rotate)$canvasTextRotatedMapVariable;");
    filterStrings.add(
        "$videoOutputMapVariable${canvasTextRotatedMapVariable}overlay=${overlayTimeFilter}x=${canvasText.x}-(((${canvasText.width}*cos($rotateForCal)+${canvasText.height}*sin($rotateForCal))-${canvasText.width})/2):y=${canvasText.y}-(((${canvasText.width}*sin($rotateForCal)+${canvasText.height}*cos($rotateForCal))-${canvasText.height})/2)$canvasTextMergedMapVariable;");

    videoOutputMapVariable = canvasTextMergedMapVariable;
  }

  ///////////////
  // ADD TITLE //
  ///////////////

  for (int i = 0; i < textList.length; i++) {
    final EditedTextData editedText = textList[i];
    final TextExportData? exportedText = editedText.textExportData;

    if (exportedText != null) {
      String textMapVariable = "[text$i]";
      String textRotatedMapVariable = "[text_rotated$i]";
      String textMergedMapVariable = "[text_merged$i]";

      // Same angle normalization / bounding-box folding as above.
      double rotate = editedText.rotate;
      if (rotate < 0) rotate = pi + (pi + rotate);

      double rotateForCal = rotate;
      if (rotateForCal > pi) rotateForCal -= pi;
      if (rotateForCal > pi / 2) {
        rotateForCal = (pi / 2) - (rotateForCal - (pi / 2));
      }

      int width = (editedText.width).floor();
      int height = (editedText.height).floor();

      // Title animation is provided as a numbered PNG sequence.
      inputArguments.addAll([
        "-framerate",
        exportedText.frameRate.toString(),
        "-i",
        "${exportedText.allSequencesPath}/%d.png"
      ]);

      String overlayTimeFilter = "";
      if (isOnlyOneClip) {
        // Single-clip exports only show the title for the first 5 seconds.
        overlayTimeFilter =
            "enable='between(t\\,0,${min(5, editedMedia.duration)})':";
      }

      filterStrings.add(
          "[${inputFileCount++}:v]trim=0:$duration,setpts=PTS-STARTPTS,scale=$width:-1$textMapVariable;");
      filterStrings.add(
          "${textMapVariable}rotate=$rotate:c=none:ow=rotw($rotate):oh=roth($rotate)$textRotatedMapVariable;");
      filterStrings.add(
          "$videoOutputMapVariable${textRotatedMapVariable}overlay=${overlayTimeFilter}x=${editedText.x}-((($width*cos($rotateForCal)+$height*sin($rotateForCal))-$width)/2):y=${editedText.y}-((($width*sin($rotateForCal)+$height*cos($rotateForCal))-$height)/2)$textMergedMapVariable;");

      videoOutputMapVariable = textMergedMapVariable;
    }
  }

  ////////////////////
  // ADD TEXT MODEL //
  ////////////////////
  // NOTE(review): textModelList is always empty above, so this loop is
  // currently dead code; kept for when the list is populated again.
  if (textModelList.isNotEmpty) {
    for (var element in textModelList) {
      filterStrings.add(
          "${videoOutputMapVariable}drawtext=text='${element.text}':x=${element.x}:y=${element.y}:fontsize=${element.fontSize}:fontcolor=${element.fontColor}[output];");
      videoOutputMapVariable = "[output]";
    }
  }

  ////////////////////////////
  // ADD OVERLAY TRANSITION //
  ////////////////////////////

  if (prevTransition != null &&
      prevTransition.type == ETransitionType.overlay) {
    final OverlayTransitionData transitionData =
        prevTransition as OverlayTransitionData;
    final TransitionFileInfo fileInfo = transitionData.fileMap[_ratio]!;

    String transitionMapVariable = "[prev_trans]";
    String transitionMergedMapVariable = "[prev_trans_merged]";

    inputArguments.addAll([
      "-c:v",
      "libvpx-vp9",
      "-i",
      "$resourceDirPath/${fileInfo.source.name}"
    ]);
    // The tail of the previous clip's transition asset plays over the start
    // of this clip.
    filterStrings.add(
        "[${inputFileCount++}:v]trim=${fileInfo.transitionPoint}:${fileInfo.duration},setpts=PTS-STARTPTS,scale=${_resolution.width}:${_resolution.height},setdar=dar=${_resolution.width / _resolution.height}$transitionMapVariable;");
    filterStrings.add(
        "$videoOutputMapVariable${transitionMapVariable}overlay=enable='between(t\\,0,${fileInfo.duration - fileInfo.transitionPoint})'$transitionMergedMapVariable;");
    videoOutputMapVariable = transitionMergedMapVariable;
  }

  if (nextTransition != null &&
      nextTransition.type == ETransitionType.overlay) {
    final OverlayTransitionData transitionData =
        nextTransition as OverlayTransitionData;
    final TransitionFileInfo fileInfo = transitionData.fileMap[_ratio]!;

    String transitionMapVariable = "[next_trans]";
    String transitionMergedMapVariable = "[next_trans_merged]";

    // -itsoffset delays the transition asset so it starts right before this
    // clip ends.
    inputArguments.addAll([
      "-c:v",
      "libvpx-vp9",
      "-itsoffset",
      (duration - fileInfo.transitionPoint).toString(),
      "-i",
      "$resourceDirPath/${fileInfo.source.name}"
    ]);
    filterStrings.add(
        "[${inputFileCount++}:v]scale=${_resolution.width}:${_resolution.height},setdar=dar=${_resolution.width / _resolution.height}$transitionMapVariable;");
    filterStrings.add(
        "$videoOutputMapVariable${transitionMapVariable}overlay=enable='between(t\\,${duration - fileInfo.transitionPoint},$duration)'$transitionMergedMapVariable;");
    videoOutputMapVariable = transitionMergedMapVariable;
  }

  // filterStrings.add(
  //     "${videoOutputMapVariable}trim=0:$duration,setpts=PTS-STARTPTS[trim_vid];");
  // videoOutputMapVariable = "[trim_vid]";

  // filterStrings.add(
  //     "${videoOutputMapVariable}zoompan=z='zoom+0.01':d=${duration * _framerate},crop=in_w:in_h:ow/2-(in_w/(1+n*0.01))/2:'oh/2-(in_h/(1+n*0.01))/2'[ani_vid];");
  // videoOutputMapVariable = "[ani_vid]";

  // Final trim to the exact clip duration, then downscale to the working
  // (2/3-sized) render resolution.
  filterStrings.add(
      "${videoOutputMapVariable}trim=0:$duration,setpts=PTS-STARTPTS[trim_vid];");
  videoOutputMapVariable = "[trim_vid]";

  filterStrings.add(
      "${videoOutputMapVariable}scale=$_scaledVideoWidth:$_scaledVideoHeight,setdar=dar=${_scaledVideoWidth / _scaledVideoHeight}[out_vid];");
  videoOutputMapVariable = "[out_vid]";

  // generate -filter_complex
  String filterComplexStr = "";
  for (final String filterStr in filterStrings) {
    filterComplexStr += filterStr;
  }

  // String inputArgumentsStr = "";
  // for (final String inputStr in inputArguments) {
  //   inputArgumentsStr += inputStr + ',';
  // }

  // Drop the trailing ';' — ffmpeg rejects an empty final filter chain.
  if (filterComplexStr.endsWith(";")) {
    filterComplexStr =
        filterComplexStr.substring(0, filterComplexStr.length - 1);
  }

  arguments.addAll(inputArguments);
  arguments.addAll(["-filter_complex", filterComplexStr]);
  arguments.addAll([
    "-map",
    videoOutputMapVariable,
    "-map",
    audioOutputMapVariable,
    "-c:v",
    "libx264",
    "-preset",
    "superfast",
    "-c:a",
    "aac",
    "-b:a",
    "256k",
    "-maxrate",
    "5M",
    "-bufsize",
    "5M",
    "-pix_fmt",
    "yuv420p",
    "-r",
    _framerate.toString(),
    "-shortest",
    // '-vf',
    // "zoompan=z='zoom+0.01':d=100, scale='if(gte(zoom,1),iw,iw*zoom)':'if(gte(zoom,1),ih,ih*zoom)'",
    outputPath,
    "-y"
  ]);

  // File file = File('$resourceDirPath/arguments.json');
  // if (await file.exists()) {
  // } else {
  //   file.writeAsString(arguments.toString());
  // }

  await _ffmpegManager.execute(arguments, ffmpegCallback);

  // final String outputPath0 = "$appDirPath/clip${clipIdx}0.mp4";
  // // final String temp = "$appDirPath/temp.mp4";

  // // final String zoomCommand =
  // // "-i $inputPath -vf \"zoompan=z='if(lte(zoom,1.0),1.5,max(1.001,zoom-0.001))':d=100, crop=in_w:in_h:ow/2-(in_w/zoom)/2:oh/2-(in_h/zoom)/2\" -c:a copy $outputPath0 -y";
  // // final String cropCommand =
  // //     "-i $temp -vf \"crop=in_w:in_h:ow/2-(in_w/zoom)/2:oh/2-(in_h/zoom)/2\" -c:a copy $outputPath0 -y";

  // await _ffmpegManager.execute([
  //   '-i',
  //   outputPath,
  //   '-vf',
  //   "zoompan=z='if(lte(zoom,1.0),1.5,max(1.001,zoom-0.001))':d=100",
  //   outputPath0,
  //   '-y'
  // ], (res) {});
  // await FFmpegKit.executeAsync(cropCommand);
// ffmpeg -i input.mp4 -vf "zoompan=z='if(lte(zoom,1.0),1.5,max(1.001,zoom-0.001))':d=100, crop=in_w:in_h:ow/2-(in_w/zoom)/2:oh/2-(in_h/zoom)/2" output.mp4
  return RenderedData(outputPath, duration);
}

/// Joins [curClip] and [nextClip] with an ffmpeg `xfade` transition of type
/// [xfadeKey], writing "xfade_merged<clipIdx>.mp4" in the app directory.
///
/// The second clip's audio is delayed to the xfade offset and mixed with the
/// first clip's audio. Returns the merged file and its combined duration
/// (sum of both clips minus the overlap).
Future<RenderedData> applyXFadeTransitions(
    RenderedData curClip,
    RenderedData nextClip,
    int clipIdx,
    String xfadeKey,
    double xfadeDuration,
    Function(Statistics)? ffmpegCallback) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputPath = "$appDirPath/xfade_merged$clipIdx.mp4";

  // The 0.01s epsilon keeps the xfade offset strictly inside the first clip.
  final double xfadeOffset =
      normalizeTime(curClip.duration - xfadeDuration - 0.01);
  double duration = normalizeTime(
      curClip.duration + nextClip.duration - xfadeDuration - 0.01);

  String filterComplexStr = "";
  // Video: cross-fade at xfadeOffset, then trim to the merged duration.
  filterComplexStr +=
      "[0:v][1:v]xfade=transition=$xfadeKey:duration=$xfadeDuration:offset=$xfadeOffset[trans_applied];[trans_applied]trim=0:$duration,setpts=PTS-STARTPTS[vid];";
  // Audio: delay clip 2 (both channels, ms) to line up with the video fade...
  filterComplexStr +=
      "[1:a]adelay=${(xfadeOffset * 1000).floor()}|${(xfadeOffset * 1000).floor()}[delayed];";
  // ...then mix with clip 1; volume=2 compensates amix's level reduction.
  filterComplexStr +=
      "[0:a][delayed]amix=inputs=2:dropout_transition=99999,volume=2[aud]";

  await _ffmpegManager.execute([
    "-i",
    curClip.absolutePath,
    "-i",
    nextClip.absolutePath,
    "-filter_complex",
    filterComplexStr,
    "-map",
    "[vid]",
    "-map",
    "[aud]",
    "-c:v",
    "libx264",
    "-preset",
    "superfast",
    "-c:a",
    "aac",
    "-b:a",
    "256k",
    "-maxrate",
    "5M",
    "-bufsize",
    "5M",
    "-pix_fmt",
    "yuv420p",
    "-r",
    _framerate.toString(),
    outputPath,
    "-y"
  ], ffmpegCallback);
  return RenderedData(outputPath, duration);
}

/// Concatenates [clips] into a single video and fades the video track out
/// over the final 1.5 seconds.
///
/// Writes "fade_out_applied.mp4" in the app directory and returns it
/// together with the summed duration of all input clips.
Future<RenderedData> applyFadeOut(List<RenderedData> clips) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputPath = "$appDirPath/fade_out_applied.mp4";

  // One "-i <path>" pair per clip, preserving order.
  final List<String> inputArguments = [
    for (final RenderedData clip in clips) ...["-i", clip.absolutePath],
  ];

  // Total length of the concatenated result.
  double totalDuration = 0;
  for (final RenderedData clip in clips) {
    totalDuration += clip.duration;
  }

  // "[0][1]...[n-1]concat=..." followed by a 1.5s video fade-out.
  final StringBuffer filterComplex = StringBuffer();
  for (int i = 0; i < clips.length; i++) {
    filterComplex.write("[$i]");
  }
  filterComplex.write(
      "concat=n=${clips.length}:v=1:a=1[outv][outa];[outv]fade=t=out:st=${totalDuration - 1.5}:d=1.5[faded]");

  final List<String> arguments = [
    ...inputArguments,
    "-filter_complex",
    filterComplex.toString(),
    "-map",
    "[faded]",
    "-map",
    "[outa]",
    "-c:v",
    "libx264",
    "-preset",
    "superfast",
    "-c:a",
    "aac",
    "-b:a",
    "256k",
    "-maxrate",
    "5M",
    "-bufsize",
    "5M",
    "-pix_fmt",
    "yuv420p",
    "-r",
    _framerate.toString(),
    outputPath,
    "-y"
  ];

  await _ffmpegManager.execute(arguments, null);
  return RenderedData(outputPath, totalDuration);
}

/// Merges all rendered [clipList] entries into one final video
/// ("allclip_merged_all.mp4").
///
/// Clips are merged in batches of up to 50: each batch's video streams are
/// concatenated losslessly via the concat demuxer, while the audio streams
/// are re-laid-out with adelay+amix (so per-clip durations stay aligned),
/// then the batch video and audio are muxed together. The batch outputs are
/// merged the same way in a second pass, with an audio fade-out applied at
/// the end. Zero-duration clips are skipped.
Future<RenderedData> mergeAllClips(List<RenderedData> clipList) async {
  final String appDirPath = await getAppDirectoryPath();

  // Drop empty clips; they would break concat/adelay offsets.
  List<RenderedData> filteredClipList = [];
  for (int i = 0; i < clipList.length; i++) {
    final RenderedData clip = clipList[i];
    if (clip.duration > 0) filteredClipList.add(clip);
  }

  List<RenderedData> mergedClipList = [];
  List<RenderedData> currentList = [];

  // Shared list file for the concat demuxer; rewritten per batch.
  final File mergeTextFile = File("$appDirPath/merge.txt");
  double totalDuration = 0;

  for (int i = 0; i < filteredClipList.length; i++) {
    final RenderedData clipData = filteredClipList[i];
    currentList.add(clipData);
    totalDuration += clipData.duration;

    // Flush a batch every 50 clips (ffmpeg input-count limits) or at the end.
    if (currentList.length >= 50 || i == filteredClipList.length - 1) {
      final String videoOutputPath =
          "$appDirPath/part_merged_video${mergedClipList.length}.mp4";
      final String audioOutputPath =
          "$appDirPath/part_merged_audio${mergedClipList.length}.m4a";
      final String mergeOutputPath =
          "$appDirPath/part_merged_all${mergedClipList.length}.mp4";
      double mergedDuration = 0;

      if (currentList.length == 1) {
        // Single clip: just split out its video and audio streams via copy.
        mergedDuration += currentList[0].duration;
        await _ffmpegManager.execute([
          "-i",
          currentList[0].absolutePath,
          "-map",
          "0:v",
          "-c:v",
          "copy",
          videoOutputPath,
          "-y"
        ], null);

        await _ffmpegManager.execute([
          "-i",
          currentList[0].absolutePath,
          "-map",
          "0:a",
          "-c:a",
          "copy",
          audioOutputPath,
          "-y"
        ], null);
      } //
      else {
        List<String> audioArguments = [];

        String videoMergeTargets = "";
        String audioMergeTargets = "";
        String audioFilterComplexStr = "";

        // Running offset (ms) so each clip's audio starts where the
        // previous clip's video ends.
        int currentDurationMS = 0;
        for (int j = 0; j < currentList.length; j++) {
          videoMergeTargets += "file '${currentList[j].absolutePath}'\n";
          mergedDuration += currentList[j].duration;

          String audioOutputVariable = "[aud$j]";
          audioArguments.addAll(["-i", currentList[j].absolutePath]);
          audioFilterComplexStr +=
              "[$j:a]atrim=0:${currentList[j].duration},asetpts=PTS-STARTPTS,adelay=$currentDurationMS|$currentDurationMS$audioOutputVariable;";
          audioMergeTargets += audioOutputVariable;

          currentDurationMS += (currentList[j].duration * 1000).floor();
        }
        await mergeTextFile.writeAsString(videoMergeTargets);

        // Lossless video concat via the concat demuxer.
        await _ffmpegManager.execute([
          "-f",
          "concat",
          "-safe",
          "0",
          "-i",
          mergeTextFile.path,
          "-c",
          "copy",
          videoOutputPath,
          "-y"
        ], null);

        // amix lowers levels by 1/n; volume=n restores the original level.
        audioFilterComplexStr +=
            "${audioMergeTargets}amix=inputs=${currentList.length}:dropout_transition=99999,volume=${currentList.length}[out]";
        audioArguments.addAll([
          "-filter_complex",
          audioFilterComplexStr,
          "-map",
          "[out]",
          "-c:a",
          "aac",
          "-b:a",
          "256k",
          audioOutputPath,
          "-y"
        ]);

        await _ffmpegManager.execute(audioArguments, null);
      }

      // Mux the batch's video and audio back together (stream copy).
      await _ffmpegManager.execute([
        "-i",
        videoOutputPath,
        "-i",
        audioOutputPath,
        "-map",
        "0:v",
        "-map",
        "1:a",
        "-c",
        "copy",
        mergeOutputPath,
        "-y"
      ], null);

      mergedClipList.add(RenderedData(mergeOutputPath, mergedDuration));
      currentList = [];
    }
  }

  // Second pass: merge the batch outputs into the final file.
  final String videoOutputPath = "$appDirPath/allclip_merged_video.mp4";
  final String audioOutputPath = "$appDirPath/allclip_merged_audio.m4a";
  final String mergeOutputPath = "$appDirPath/allclip_merged_all.mp4";

  if (mergedClipList.length == 1) {
    // NOTE(review): this path applies no audio fade-out, unlike the
    // multi-batch path below — confirm whether that is intentional.
    await _ffmpegManager.execute([
      "-i",
      mergedClipList[0].absolutePath,
      "-map",
      "0:v",
      "-c:v",
      "copy",
      videoOutputPath,
      "-y"
    ], null);

    await _ffmpegManager.execute([
      "-i",
      mergedClipList[0].absolutePath,
      "-map",
      "0:a",
      "-c:a",
      "copy",
      audioOutputPath,
      "-y"
    ], null);
  } //
  else {
    List<String> audioArguments = [];

    String videoMergeTargets = "";
    String audioMergeTargets = "";
    String audioFilterComplexStr = "";

    int currentDurationMS = 0;
    for (int j = 0; j < mergedClipList.length; j++) {
      videoMergeTargets += "file '${mergedClipList[j].absolutePath}'\n";

      String audioOutputVariable = "[aud$j]";
      audioArguments.addAll(["-i", mergedClipList[j].absolutePath]);
      audioFilterComplexStr +=
          "[$j:a]atrim=0:${mergedClipList[j].duration},asetpts=PTS-STARTPTS,adelay=$currentDurationMS|$currentDurationMS$audioOutputVariable;";
      audioMergeTargets += audioOutputVariable;

      currentDurationMS += (mergedClipList[j].duration * 1000).floor();
    }
    await mergeTextFile.writeAsString(videoMergeTargets);

    await _ffmpegManager.execute([
      "-f",
      "concat",
      "-safe",
      "0",
      "-i",
      mergeTextFile.path,
      "-c",
      "copy",
      videoOutputPath,
      "-y"
    ], null);

    // Mix all batch audio tracks, then fade the last _fadeDuration seconds.
    audioFilterComplexStr +=
        "${audioMergeTargets}amix=inputs=${mergedClipList.length}:dropout_transition=99999,volume=${mergedClipList.length / 2}[merged];[merged]afade=t=out:st=${max(totalDuration - _fadeDuration, 0)}:d=$_fadeDuration[out]";
    audioArguments.addAll([
      "-filter_complex",
      audioFilterComplexStr,
      "-map",
      "[out]",
      "-c:a",
      "aac",
      "-b:a",
      "256k",
      audioOutputPath,
      "-y"
    ]);

    await _ffmpegManager.execute(audioArguments, null);
  }

  // Final mux of the merged video and audio (stream copy).
  await _ffmpegManager.execute([
    "-i",
    videoOutputPath,
    "-i",
    audioOutputPath,
    "-map",
    "0:v",
    "-map",
    "1:a",
    "-c",
    "copy",
    mergeOutputPath,
    "-y"
  ], null);

  return RenderedData(mergeOutputPath, totalDuration);
}

/// Mixes background [musics] into [mergedClip]'s audio track, writing
/// "result.mp4" in the app directory.
///
/// With no musics, a silent track is used; with one, it is volume-adjusted,
/// faded out and trimmed; with several, each is processed the same way and
/// the results are concatenated. The clip's own audio is ducked to 0.8,
/// mixed with the music bed, trimmed to the clip length, and given a final
/// fade-out. Video is stream-copied.
Future<RenderedData> applyMusics(
    RenderedData mergedClip, List<MusicData> musics) async {
  final List<String> arguments = <String>[];
  final String appDirPath = await getAppDirectoryPath();
  final String outputPath = "$appDirPath/result.mp4";

  final List<String> inputArguments = <String>[];
  final List<String> filterStrings = <String>[];

  int inputFileCount = 0;

  // Input 0: the merged video clip.
  inputArguments.addAll(["-i", mergedClip.absolutePath]);
  inputFileCount++;

  if (musics.isEmpty) {
    // No music: use a muted silent source as the [bgm] track.
    inputArguments.addAll([
      "-f",
      "lavfi",
      "-t",
      mergedClip.duration.toString(),
      "-i",
      "anullsrc=channel_layout=stereo:sample_rate=44100"
    ]);

    filterStrings.add("[$inputFileCount:a]volume=0[bgm];");
    inputFileCount++;
  } else if (musics.length == 1) {
    // Single track: volume -> fade-out -> trim, straight into [bgm].
    final MusicData musicData = musics[0];
    final double duration = musicData.duration;

    inputArguments.addAll(["-i", musicData.absolutePath!]);
    filterStrings.add(
        "[$inputFileCount:a]volume=${musicData.volume}[volume_applied_0];[volume_applied_0]afade=t=out:st=${max(duration - _fadeDuration, 0)}:d=$_fadeDuration[faded0];[faded0]atrim=0:$duration[bgm];");
    inputFileCount++;
  } //
  else {
    // Multiple tracks: process each, then concat them into one [bgm] bed.
    String mergeBgmTargets = "";
    for (int i = 0; i < musics.length; i++) {
      final MusicData musicData = musics[i];
      final double duration = musicData.duration;

      inputArguments.addAll(["-i", musicData.absolutePath!]);
      filterStrings.add(
          "[$inputFileCount:a]volume=${musicData.volume}[volume_applied_$i];[volume_applied_$i]afade=t=out:st=${max(duration - _fadeDuration, 0)}:d=$_fadeDuration[faded$i];[faded$i]atrim=0:$duration[aud$inputFileCount];");
      mergeBgmTargets += "[aud$inputFileCount]";
      inputFileCount++;
    }
    filterStrings
        .add("${mergeBgmTargets}concat=n=${musics.length}:v=0:a=1[bgm];");
  }

  // Duck clip audio, mix with [bgm] (volume=2 restores amix's level drop),
  // trim to clip length, then apply the final fade-out.
  filterStrings.addAll([
    "[0:a]volume=0.8[merge_audio];[merge_audio][bgm]amix=inputs=2:dropout_transition=99999,volume=2[merged];[merged]atrim=0:${mergedClip.duration}[trimed];[trimed]afade=t=out:st=${max(mergedClip.duration - _fadeDuration, 0)}:d=$_fadeDuration[out]"
  ]);

  String filterComplexStr = "";
  for (final String filterStr in filterStrings) {
    filterComplexStr += filterStr;
  }

  arguments.addAll(inputArguments);
  arguments.addAll(["-filter_complex", filterComplexStr]);
  arguments.addAll([
    "-map",
    "0:v",
    "-map",
    "[out]",
    "-c:v",
    "copy",
    "-c:a",
    "aac",
    "-b:a",
    "256k",
    "-shortest",
    outputPath,
    "-y"
  ]);

  await _ffmpegManager.execute(arguments, null);
  return RenderedData(outputPath, mergedClip.duration);
}

/// Produces a half-size JPEG thumbnail for [editedMedia], cached on disk.
///
/// The cache key combines the media key and the start time (in ms); an
/// existing file is returned immediately. For videos the frame at the
/// fixed offset 00:00:01 is used.
Future<String?> extractThumbnailAvailable(EditedMedia editedMedia) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputPath =
      "$appDirPath/${editedMedia.mediaData.key}${(editedMedia.startTime * 1000).toInt()}.jpg";
  // Reuse a previously extracted thumbnail when present.
  if (await File(outputPath).exists()) {
    return outputPath;
  }

  final MediaData mediaData = editedMedia.mediaData;
  final List<String> arguments = <String>["-i", mediaData.absolutePath];

  // Seek one second in for videos; images are read from frame 0.
  if (mediaData.type == EMediaType.video) {
    arguments.addAll(["-ss", '00:00:01']);
  }

  arguments.addAll(["-vframes", "1", '-vf', 'scale=iw/2:-1', outputPath, "-y"]);

  await _ffmpegManager.execute(arguments, null);
  return outputPath;
}

/// Renders a thumbnail for [editedMedia] at its edited start time, scaled
/// to half the current render resolution, cached on disk.
///
/// Prefers the pre-scaled media file when one exists. NOTE(review): the
/// cache path is identical to the one used by [extractThumbnailAvailable],
/// so the two helpers can serve each other's cached output — confirm
/// whether that collision is intended.
Future<String?> extractThumbnail(EditedMedia editedMedia) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputPath =
      "$appDirPath/${editedMedia.mediaData.key}${(editedMedia.startTime * 1000).toInt()}.jpg";
  if (await File(outputPath).exists()) {
    return outputPath;
  }

  final MediaData mediaData = editedMedia.mediaData;
  final List<String> arguments = <String>[
    "-i",
    mediaData.scaledPath ?? mediaData.absolutePath,
  ];

  // For videos, grab the frame at the clip's trimmed start time.
  if (mediaData.type == EMediaType.video) {
    arguments.addAll(["-ss", editedMedia.startTime.toString()]);
  }

  // Scale down to half the render size and pin the display aspect ratio.
  final String filterComplexStr =
      "${_getTransposeFilter(mediaData.orientation)}scale=${(_scaledVideoWidth / 2).floor()}:${(_scaledVideoHeight / 2).floor()},setdar=dar=${_scaledVideoWidth / _scaledVideoHeight}";

  arguments
    ..addAll(["-filter_complex", filterComplexStr])
    ..addAll(["-vframes", "1", outputPath, "-y"]);

  await _ffmpegManager.execute(arguments, null);
  return outputPath;
}

/// Extracts the first frame of the media at [path] as a JPEG next to the
/// app directory and returns the JPEG's path.
Future<String?> extractThumbnailPath(String path) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputFilePath =
      "$appDirPath/${p.basename(path).split('.').first}.jpg";
  final List<String> arguments = [
    '-i',
    path,
    '-vframes',
    '1',
    outputFilePath,
    // "-y" overwrites a stale thumbnail instead of letting ffmpeg fail
    // on an existing output file — consistent with the other helpers here.
    '-y'
  ];
  await _ffmpegManager.execute(arguments, null);

  return outputFilePath;
}

/// Transcodes the WebM at [path] to an H.264 MP4, applying a black
/// chroma-key + alpha merge filter chain, and returns the MP4 path.
///
/// Any previous output is deleted first so ffmpeg writes a fresh file.
/// NOTE(review): H.264/yuv output cannot carry the alpha produced by
/// alphamerge — confirm the filter chain does what is intended.
Future<String?> extractMp4FromWebm(String path) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputFilePath =
      "$appDirPath/${p.basename(path).split('.').first}.mp4";
  final File file = File(outputFilePath);
  if (await file.exists()) {
    await file.delete();
  }
  // FIX: '-c:v libx264' was passed twice; only the last occurrence takes
  // effect, so the redundant first pair has been removed.
  final List<String> arguments = [
    '-i',
    path,
    '-vf',
    'format=yuva420p,chromakey=black:0.1:0.2,split[m][a];[m]setpts=PTS-STARTPTS[m];[a]alphaextract,format=yuv420p[a];[m][a]alphamerge',
    '-c:v',
    'libx264',
    '-c:a',
    'copy',
    outputFilePath
  ];
  await _ffmpegManager.execute(arguments, null);

  return outputFilePath;
}

/// Fetches the resource described by [fileInfo] and returns a 320-px-wide
/// GIF thumbnail generated from it.
Future<String?> extractGifThumbnailPathBy(ResourceFileInfo fileInfo) async {
  final resourcePath =
      await ResourceManager.getInstance().loadResourceFile(fileInfo);
  return extractGifThumbnailPath(resourcePath, width: 320);
}

/// Converts the (VP9) file at [path] into a looping GIF [width] px wide,
/// using a generated palette with transparency reserved. Results are
/// cached under the app resource directory, keyed by basename + width.
Future<String?> extractGifThumbnailPath(String path, {int width = 320}) async {
  final String appDirPath = await getAppResourcePath();
  final String outputFilePath =
      "$appDirPath/${p.basename(path).split('.').first}$width.gif";

  // Serve the cached GIF when it already exists.
  if (await File(outputFilePath).exists()) {
    return outputFilePath;
  }

  // Decode with libvpx-vp9 so the alpha channel of VP9 WebM survives,
  // then build a transparent-aware palette and apply it.
  await _ffmpegManager.execute([
    '-c:v',
    'libvpx-vp9',
    '-i',
    path,
    '-vf',
    'scale=$width:-1,split[a][b];[a]palettegen=reserve_transparent=on[p];[b][p]paletteuse',
    '-loop',
    '0',
    outputFilePath
  ], null);

  return outputFilePath;
}

/// Downscales an image so that its shorter edge becomes 1440 px, returning
/// a copy of [mediaData] whose `scaledPath` points at the resized JPEG.
///
/// Videos, and images already at or below the target size, are returned
/// unchanged. Scaled dimensions are forced even for codec compatibility.
Future<MediaData> scaleImageMedia(MediaData mediaData) async {
  if (mediaData.type == EMediaType.video) return mediaData;

  final String appDirPath = await getAppDirectoryPath();
  final String outputPath = "$appDirPath/${Uuid().v4()}.jpg";

  const int scaleTargetSize = 1440;
  final double imageScaleFactor =
      (scaleTargetSize * 1.0) / min(mediaData.width, mediaData.height);

  // Factor >= 1 means the image is already small enough — no upscaling.
  if (imageScaleFactor >= 1) return mediaData;

  final int scaledWidth =
      _getEvenNumber((mediaData.width * imageScaleFactor).floor());
  final int scaledHeight =
      _getEvenNumber((mediaData.height * imageScaleFactor).floor());

  final String filterComplexStr =
      "${_getTransposeFilter(mediaData.orientation)}scale=$scaledWidth:$scaledHeight,setdar=dar=${scaledWidth / scaledHeight}";

  await _ffmpegManager.execute([
    "-i",
    mediaData.absolutePath,
    "-filter_complex",
    filterComplexStr,
    outputPath,
    "-y"
  ], null);

  // Clone the metadata with the new dimensions; the original file path is
  // preserved while scaledPath carries the resized output.
  final MediaData resultData = MediaData(
      mediaData.key,
      mediaData.absolutePath,
      mediaData.type,
      scaledWidth,
      scaledHeight,
      mediaData.orientation,
      mediaData.duration,
      mediaData.createDate,
      mediaData.gpsString,
      mediaData.mlkitDetected);
  resultData.scaledPath = outputPath;

  return resultData;
}

/// Turns a still-image [editedMedia] into an MP4 clip of its edited
/// duration by looping the image, caching the result on disk.
///
/// The produced path is stored in `mediaData.imageVideoPath` and returned.
Future<String> extractVideoFromImageMedia(EditedMedia editedMedia) async {
  final mediaData = editedMedia.mediaData;
  final String inputFilePath = mediaData.scaledPath ?? mediaData.absolutePath;
  final String appDirPath = await getAppDirectoryPath();
  final int duration = editedMedia.duration.toInt();
  // The cache key embeds the duration so clips of different lengths from
  // the same image land in distinct files.
  final String outputFilePath =
      "$appDirPath/${p.basename(inputFilePath).split('.').first}$duration.mp4";
  final File file = File(outputFilePath);
  // FIX: replaced the empty `if (exists) {} else {...}` with the direct
  // negated check — same behavior, no dead branch.
  if (!await file.exists()) {
    // NOTE(review): '-framerate' is fed the clip *duration*, not a frame
    // rate; _framerate would be the expected value — confirm intent.
    final List<String> arguments = [
      '-framerate',
      '$duration',
      '-loop',
      '1',
      '-i',
      inputFilePath,
      '-t',
      '$duration',
      '-vf',
      'scale=iw:-1',
      outputFilePath
    ];
    await _ffmpegManager.execute(arguments, null);
  }
  mediaData.imageVideoPath = outputFilePath;
  return outputFilePath;
}

/// Concatenates the image-derived clips of [list] into one MP4 with
/// ffmpeg's concat demuxer (stream copy, no re-encode) and returns its path.
///
/// Requires every entry's `mediaData.imageVideoPath` to be populated
/// (see extractVideoFromImageMedia).
Future<String> appendVideos(List<EditedMedia> list) async {
  final String appDirPath = await getAppDirectoryPath();
  final String outputFilePath = "$appDirPath/appendVideos.mp4";

  // Build the concat list file. Each entry must be quoted as: file 'path'
  final File mergeTextFile = File("$appDirPath/merge.txt");
  final StringBuffer buffer = StringBuffer();
  for (final element in list) {
    // BUG FIX: the closing single quote was missing, which breaks the
    // concat demuxer for paths containing spaces or special characters.
    buffer.writeln("file '${element.mediaData.imageVideoPath!}'");
  }
  await mergeTextFile.writeAsString(buffer.toString());

  await _ffmpegManager.execute([
    '-f',
    "concat",
    "-safe",
    "0",
    '-i',
    mergeTextFile.path,
    '-c',
    'copy',
    outputFilePath,
    '-y'
  ], null);
  return outputFilePath;
}

/// The frame rate (fps) used for rendering.
int getFramerate() => _framerate;

/// Snaps [duration] (seconds) down to the nearest frame boundary, then
/// truncates the result to millisecond precision.
double normalizeTime(double duration) {
  final double snapped = duration - (duration % _minDurationFactor);
  return (snapped * 1000).floor() / 1000.0;
}
