package cn.gitee.ffmpeg.processed.manager;


import cn.gitee.ffmpeg.processed.config.CommonThreadPool;
import cn.gitee.ffmpeg.processed.config.FFmpegFontProperties;
import cn.gitee.ffmpeg.processed.config.FFmpegLocalFileContext;
import cn.gitee.ffmpeg.processed.config.FFmpegProperties;
import cn.gitee.ffmpeg.processed.constants.FFmpegConstant;
import cn.gitee.ffmpeg.processed.dto.ComposeVideoDTO;
import cn.gitee.ffmpeg.processed.dto.WatermarkInfoDTO;
import cn.gitee.ffmpeg.processed.utils.FFmpegUtil;
import cn.gitee.ffmpeg.processed.utils.ParamVerifyUtil;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.http.HttpUtil;
import lombok.extern.slf4j.Slf4j;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

import java.io.File;
import java.math.RoundingMode;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * @author xuhainan
 * @date 2023/3/6 14:33
 * @region hefei
 */
@Slf4j
@Component
public class FFmpegManager implements ApplicationContextAware {

    private final static String VIDEO = "video";
    private final static String AUDIO = "audio";
    private final static String NUMBER_STR = "number";
    private final static String COMPLETED = "completed";
    private final static String START = "start";
    private final static String CI = "次";
    private final static String SECOND = "秒";
    private final static String TIME_START = "00:00:00";
    private FFmpegProperties fFmpegProperties;

    private RedissonClient redissonClient;

    /**
     * Builds the working-directory prefix for one composition job.
     *
     * @param uniqueCode unique identifier of the video job
     * @param isStart    whether to append the "start" sub-directory
     * @return directory path ending with a file separator
     */
    private String getDirPrefix(String uniqueCode, boolean isStart) {
        StringBuilder dir = new StringBuilder(fFmpegProperties.getLocalDirPrefix())
                .append(File.separator)
                .append(uniqueCode)
                .append(File.separator);
        if (isStart) {
            dir.append(START).append(File.separator);
        }
        return dir.toString();
    }

    /**
     * Entry point guarded by a distributed lock so the same video is never
     * composed twice concurrently.
     *
     * @param composeVideoDTO compose request
     * @return local path of the finished video, or {@code null} when another
     *         worker already holds the creation lock for this unique code
     */
    public String proceedCompletedVideo(ComposeVideoDTO composeVideoDTO) {
        log.info("proceedCompletedVideo 创建视频入参：【{}】", composeVideoDTO);
        String uniqueCode = composeVideoDTO.getUniqueCode();
        RLock lock = redissonClient.getLock(FFmpegConstant.CREATING_PREFIX + uniqueCode);
        // Guard clause: bail out immediately when another worker owns the lock.
        if (!lock.tryLock()) {
            log.info("视频[{}]正在创建中....", uniqueCode);
            return null;
        }
        try {
            return doProceedCompletedVideo(composeVideoDTO);
        } finally {
            lock.unlock();
        }
    }


    /**
     * 生成动作视频
     * <p>
     * Composes the opening clip and the body clip in parallel, merges them,
     * fires the completion callback, then deletes all intermediate local files.
     *
     * @param composeVideoDTO 入参 (compose request)
     * @return 动作视频路径 (local path of the merged final video)
     * @throws IllegalArgumentException when the action type is unknown or any
     *                                  composition/merge step fails
     */
    public String doProceedCompletedVideo(ComposeVideoDTO composeVideoDTO) {
        log.info("doProceedCompletedVideo 执行创建视频入参：【{}】", composeVideoDTO);
        String uniqueCode = composeVideoDTO.getUniqueCode();

        // Holds the final merged path produced inside the async pipeline.
        AtomicReference<String> videoPathRef = new AtomicReference<>();

        CompletableFuture<String> startVideoFuture = CompletableFuture.supplyAsync(() -> {
            // 1 生成开头视频
            String startVideo = doComposeStartVideo(composeVideoDTO);
            FFmpegLocalFileContext.set(uniqueCode, startVideo);
            return startVideo;
        }, CommonThreadPool.THREAD_POOL);
        String videoPath = this.obtainBaseVideo(uniqueCode, composeVideoDTO.getBodyVideoPath());
        CompletableFuture<String> bodyVideoFuture = CompletableFuture.supplyAsync(() -> {
            String type = composeVideoDTO.getType();
            // 2 根据类型，生成对应正文视频
            String bodyVideo;
            switch (type) {
                case "0" -> {
                    int frequency = composeVideoDTO.getFrequency();
                    bodyVideo = trainingContentWithNumber(videoPath, frequency, uniqueCode);
                }
                case "1" -> {
                    int minute = composeVideoDTO.getMinute();
                    int second = composeVideoDTO.getSecond();
                    int secondTime = minute * 60 + second;
                    bodyVideo = trainingContentWithTime(videoPath, secondTime, uniqueCode);
                }
                default -> throw new IllegalArgumentException("无效的动作类型" + type);
            }
            FFmpegLocalFileContext.set(uniqueCode, bodyVideo);

            return bodyVideo;
        }, CommonThreadPool.THREAD_POOL);

        startVideoFuture.thenAcceptBoth(bodyVideoFuture, (startVideo, bodyVideo) -> {
            String dirPrefix = getDirPrefix(uniqueCode, false);
            // video
            String videoSuffix = StrUtil.DOT + FileUtil.getSuffix(videoPath);
            // 视频路径
            String completedVideoPath = dirPrefix + VIDEO + File.separator + uniqueCode + videoSuffix;
            List<String> videos = Arrays.asList(startVideo, bodyVideo);
            boolean mergeVideoWithTxt = FFmpegUtil.mergeVideo(videos, completedVideoPath, COMPLETED + File.separator);
            ParamVerifyUtil.isFalse(mergeVideoWithTxt, "开头视频与正文视频合并失败");
            FFmpegLocalFileContext.set(uniqueCode, completedVideoPath);
            // FIX: publish the merged path to the caller — previously the
            // reference was never set and this method always returned null.
            videoPathRef.set(completedVideoPath);
            // 执行回调
            doCallback(uniqueCode, completedVideoPath);
        }).whenCompleteAsync((r, e) -> {
            // FIX: the old code logged "执行成功" (success) unconditionally, even
            // when `e` carried a failure; distinguish the two outcomes.
            if (e == null) {
                log.info("视频[{}]创建执行成功", uniqueCode);
            } else {
                log.error("视频[{}]创建执行失败", uniqueCode, e);
            }
            // 异步删除本地中间文件
            List<String> clearFilePath = FFmpegLocalFileContext.clear(uniqueCode);
            log.info("待删除的文件路径：{}", clearFilePath);
            if (CollUtil.isNotEmpty(clearFilePath)) {
                clearFilePath.forEach(FileUtil::del);
            }
        }, CommonThreadPool.THREAD_POOL).join();

        return videoPathRef.get();
    }

    /**
     * 生成开头视频 (builds the opening clip).
     * {User.Dir}/hashCode(动作id + 类型 + 次数or时间)/
     * |-- start/audio/
     * |-- start/video/
     * <p>
     * Pipeline: compose the start audio, loop/cut the start video to the audio
     * length, draw the count/duration watermark, then mux audio onto video.
     *
     * @param composeVideoDTO compose request
     * @return 视频路径 (local path of the opening clip)
     * @throws IllegalArgumentException when any composition step fails
     */
    public String doComposeStartVideo(ComposeVideoDTO composeVideoDTO) {
        log.info("doComposeStartVideo 创建开头视频入参：【{}】", composeVideoDTO);
        String uniqueCode = composeVideoDTO.getUniqueCode();
        String type = composeVideoDTO.getType();
        // Source files (downloaded into the local cache when remote).
        String videoPath = this.obtainBaseVideo(uniqueCode, composeVideoDTO.getStartVideoPath());
        // FIX: obtainBaseAudio(uniqueCode, audioPath) with two Strings bound to
        // the (audioPath, prefix) overload, treating uniqueCode as the audio
        // path; call obtainBaseSource directly so uniqueCode is the scope dir,
        // mirroring obtainBaseVideo above.
        String audioPath = this.obtainBaseSource(uniqueCode, composeVideoDTO.getAudioPath(), null, null);

        int minute = composeVideoDTO.getMinute();
        int second = composeVideoDTO.getSecond();
        int frequency = composeVideoDTO.getFrequency();

        String dirPrefix = getDirPrefix(uniqueCode, true);
        // audio
        String audioSuffix = StrUtil.DOT + FileUtil.getSuffix(audioPath);
        // 合成音频 (composed start audio)
        String startAudioPath = dirPrefix + AUDIO + File.separator + START + audioSuffix;
        FileUtil.mkParentDirs(startAudioPath);
        boolean startAudio = startAudio(audioPath, minute, second, frequency, startAudioPath, type, uniqueCode);
        ParamVerifyUtil.isFalse(startAudio, "开头音频生成失败");

        // Register the start audio for post-run cleanup.
        FFmpegLocalFileContext.set(uniqueCode, startAudioPath);

        // video
        String videoSuffix = StrUtil.DOT + "ts";
        String videoBaseDir = dirPrefix + VIDEO + File.separator;
        // Loop/cut the video so it lasts exactly as long as the audio.
        long audioTime = FFmpegUtil.sourceTime(startAudioPath);
        String startVideoPath = videoBaseDir + "step1" + videoSuffix;
        FileUtil.mkParentDirs(startVideoPath);
        boolean videoLoop = videoLoop(uniqueCode, videoPath, audioTime, startVideoPath);
        ParamVerifyUtil.isFalse(videoLoop, "开头纯视频生成失败");
        // Register the raw start video for post-run cleanup.
        FFmpegLocalFileContext.set(uniqueCode, startVideoPath);

        // Watermark text: "N次" (count) takes priority over "N秒" (duration).
        String watermarkStartVideo = videoBaseDir + "step2" + videoSuffix;
        FileUtil.mkParentDirs(watermarkStartVideo);
        String timeOrFrequencyDesc = "";
        if (frequency > 0) {
            timeOrFrequencyDesc = frequency + CI;
        } else {
            if (minute > 0 || second > 0) {
                int secondTime = minute * 60 + second;
                timeOrFrequencyDesc = secondTime + SECOND;
            }
        }
        boolean doWatermarkVideo = doWatermarkVideo(startVideoPath, watermarkStartVideo, timeOrFrequencyDesc);
        ParamVerifyUtil.isFalse(doWatermarkVideo, "开头视频水印添加失败");
        // Register the watermarked video for post-run cleanup.
        FFmpegLocalFileContext.set(uniqueCode, watermarkStartVideo);

        // Mux the watermarked video with the composed audio.
        String audioAndVideoPath = videoBaseDir + uniqueCode + videoSuffix;
        FileUtil.mkParentDirs(audioAndVideoPath);
        boolean mergeAudioToVideo = FFmpegUtil.mergeAudioToVideo0(watermarkStartVideo, startAudioPath, audioAndVideoPath);
        if (mergeAudioToVideo) {
            // Register the completed opening clip for post-run cleanup.
            FFmpegLocalFileContext.set(uniqueCode, audioAndVideoPath);
            return audioAndVideoPath;
        }

        throw new IllegalArgumentException("开头视频生成失败");
    }

    /**
     * Repeats a clip until it covers the requested duration, then cuts it to
     * exactly that length.
     *
     * @param uniqueCode 动作视频唯一编码 (job identifier, used to register temp files)
     * @param videoPath  视频 (source clip)
     * @param time       持续时间 （毫秒） target duration in milliseconds
     * @param outPath    输出 (output file)
     * @return true when the final cut succeeded
     */
    private boolean videoLoop(String uniqueCode, String videoPath, long time, String outPath) {
        log.info("videoLoop 循环截取视频入参：uniqueCode=【{}】，videoPath=【{}】，time=【{}】，outPath=【{}】", uniqueCode, videoPath, time, outPath);
        long videoTime = FFmpegUtil.sourceTime(videoPath);

        // Repetitions needed to reach the requested duration, rounded up.
        int count = Double.valueOf(NumberUtil.div(time, videoTime, 0, RoundingMode.UP)).intValue();

        String sourceForCut;
        if (count <= 1) {
            // The clip is already long enough; cut it directly.
            sourceForCut = videoPath;
        } else {
            List<String> copies = new ArrayList<>(count);
            for (int i = 0; i < count; i++) {
                copies.add(videoPath);
            }
            sourceForCut = FFmpegUtil.obtainParentFolder(outPath) + FFmpegUtil.obtainFileName(outPath) + "-loop.mp4";
            FileUtil.mkParentDirs(sourceForCut);
            boolean merged = FFmpegUtil.mergeVideoWithTxt(copies, sourceForCut, "video-loop" + File.separator);
            ParamVerifyUtil.isFalse(merged, "循环处理合并视频失败");
            // Register the intermediate looped file for post-run cleanup.
            FFmpegLocalFileContext.set(uniqueCode, sourceForCut);
        }
        // Milliseconds -> seconds with two decimals for the cut command.
        String timeSecond = String.valueOf(NumberUtil.div(time, 1000, 2, RoundingMode.HALF_UP));
        return FFmpegUtil.cutVideo(sourceForCut, TIME_START, timeSecond, null, outPath);
    }

    /**
     * Composes the opening audio track.
     * Playback order: action name + 0.5s silence + duration/count announcement
     * + 1s silence + "3-2-1 start" clip + 1s silence.
     *
     * @param audioPath 动作音频 (action-name clip)
     * @param outPath   合成音频 (output file)
     * @return 是否成功 (whether the concat succeeded)
     */
    private boolean startAudio(String audioPath, int minute, int second, int frequency, String outPath, String type, String uniqueCode) {
        log.info("startAudio 创建开头音频入参：audioPath=【{}】，minute =【{}】，second =【{}】，frequency =【{}】，" +
                "outPath =【{}】，type =【{}】，uniqueCode =【{}】", audioPath, minute, second, frequency, outPath, type, uniqueCode);
        // 动作名称音频+0.5s空音频+时长音频+1s空音频+321开始音频+1s空音频
        List<String> audios = new ArrayList<>();
        String startAudio = fFmpegProperties.getNetStartAudio();
        // Action-name audio
        audios.add(audioPath);
        // 0.5s silence
        audios.add(obtainEmptyAudio("0.5"));
        // Duration or count announcement
        timeOrFrequencyAudio(minute, second, frequency, type, audios::add);
        // 1s silence
        audios.add(obtainEmptyAudio("1"));
        // "3-2-1 start" clip.
        // FIX: obtainBaseAudio(uniqueCode, startAudio) bound to the
        // (audioPath, prefix) String overload, treating uniqueCode as the audio
        // path; call obtainBaseSource directly so uniqueCode is the scope dir.
        audios.add(obtainBaseSource(uniqueCode, startAudio, null, null));
        // 1s silence
        audios.add(obtainEmptyAudio("1"));

        return FFmpegUtil.concatAudio0(audios, outPath);
    }

    /**
     * Appends the spoken duration or repetition-count announcement clips.
     * 优先次数 (count wins when type is "0").
     *
     * @param minute    分钟
     * @param second    秒钟
     * @param frequency 次数
     * @param audios    sink receiving audio file paths in playback order
     */
    private void timeOrFrequencyAudio(int minute, int second, int frequency, String type, Consumer<String> audios) {
        boolean nothingToAnnounce = minute == 0 && second == 0 && frequency == 0;
        if (nothingToAnnounce) {
            return;
        }
        String netCommonAudioPrefix = fFmpegProperties.getNetCommonAudioPrefix() + "/";
        String baseAudioSuffix = StrUtil.DOT + fFmpegProperties.getBaseAudioSuffix();
        if (StrUtil.equals(type, "0")) {
            // Count-based exercise: announce the repetition count.
            frequencyAudio(frequency, audios);
        }
        if (StrUtil.equals(type, "1")) {
            // Time-based exercise: normalize to minutes + remaining seconds.
            if (second >= 60) {
                minute += (second / 60);
                second = second % 60;
            }

            if (minute < 60 && minute > 0) {
                audios.accept(obtainBaseAudio(netCommonAudioPrefix + "number/" + minute + baseAudioSuffix, "number"));
                // NOTE(review): "unit_min" vs "unit_minute" — presumably two
                // different phrasings depending on whether seconds follow;
                // confirm both asset names exist on the server.
                String minuteUnit = second > 0 ? "unit_min" : "unit_minute";
                audios.accept(obtainBaseAudio(netCommonAudioPrefix + minuteUnit + baseAudioSuffix));
            }

            if (second > 0) {
                audios.accept(obtainBaseAudio(netCommonAudioPrefix + "number/" + second + baseAudioSuffix));
                audios.accept(obtainBaseAudio(netCommonAudioPrefix + "unit_second" + baseAudioSuffix));
            }
        }
    }

    /**
     * Appends the "N 次" (N times) announcement clip for the given count.
     */
    private void frequencyAudio(int frequency, Consumer<String> audios) {
        String suffix = StrUtil.DOT + fFmpegProperties.getBaseAudioSuffix();
        String clipPath = fFmpegProperties.getNetCommonAudioPrefix() + "/" + "number_times/" + frequency + suffix;
        audios.accept(obtainBaseAudio(clipPath, "number_times"));
    }

    /**
     * Returns a silent audio clip of the given duration, generating and
     * caching it on disk on first use.
     *
     * @param time 时间 duration in seconds, e.g. "0.5"
     * @return 路径 local path of the silent clip
     */
    private String obtainEmptyAudio(String time) {
        String target = fFmpegProperties.getBaseAudioDir() + File.separator + "empty" + File.separator
                + time + StrUtil.DOT + fFmpegProperties.getBaseAudioSuffix();
        if (FileUtil.exist(target)) {
            // Already generated on a previous run — reuse the cached file.
            return target;
        }
        log.info("[obtainEmptyAudio] Method executor time [{}] out [{}]", time, target);
        FileUtil.mkParentDirs(target);
        boolean emptyAudio = FFmpegUtil.processEmptyAudio(time, target);
        ParamVerifyUtil.isFalse(emptyAudio, String.format("%s 空白音频生成失败", time));
        return target;
    }

    /**
     * Resolves an audio resource (URL or path) to a local file in the shared
     * "common" cache, downloading it when missing.
     *
     * @param audioPath 音频路径 — audio URL or path
     * @return 本地路径 — local file path
     */
    private String obtainBaseAudio(String audioPath) {
        return obtainBaseSource(null, audioPath, null, null);
    }

    /**
     * Resolves an audio resource to a local file under the shared "common"
     * cache, nested inside an extra sub-directory.
     *
     * @param audioPath 音频路径 — audio URL or path
     * @param prefix    路径前缀 — extra sub-directory under the cache scope
     * @return 本地路径 — local file path
     */
    private String obtainBaseAudio(String audioPath, String prefix) {
        return obtainBaseSource(null, audioPath, null, prefix);
    }

    /**
     * Resolves an audio resource to a local file scoped by training id.
     * <p>
     * NOTE(review): no caller in this file passes a {@code Long} — calls like
     * {@code obtainBaseAudio(uniqueCode, startAudio)} with two Strings bind to
     * the (audioPath, prefix) overload instead. Confirm this overload is still
     * reachable/needed. Throws NPE when {@code trainingId} is null.
     *
     * @param trainingId scope directory name (must be non-null)
     * @param audioPath  音频路径 — audio URL or path
     * @return 本地路径 — local file path
     */
    private String obtainBaseAudio(Long trainingId, String audioPath) {
        return obtainBaseSource(trainingId.toString(), audioPath, null, null);
    }

    /**
     * Resolves a video resource to a local file scoped by unique code.
     *
     * @param uniqueCode scope directory name; "common" cache when blank
     * @param videoPath  视频路径 — video URL or path
     * @return 本地路径 — local file path
     */
    private String obtainBaseVideo(String uniqueCode, String videoPath) {
        return obtainBaseSource(uniqueCode, null, videoPath, null);
    }

    /**
     * Resolves a remote or cached audio/video resource to a local file path.
     * Exactly one of audioPath / videoPath must be non-blank; http(s) sources
     * are downloaded on first use, other sources must already exist locally.
     *
     * @param uniqueCode scope directory; falls back to "common" when blank
     * @param audioPath  audio URL/path, or null
     * @param videoPath  video URL/path, or null
     * @param prefix     optional extra sub-directory under the scope
     * @return existing local file path
     * @throws IllegalArgumentException when the resource cannot be resolved locally
     * @throws RuntimeException         when the download itself fails
     */
    private String obtainBaseSource(String uniqueCode, String audioPath, String videoPath, String prefix) {
        String sourcePath = StrUtil.isNotBlank(audioPath) ? audioPath : videoPath;
        String name = FileUtil.getName(sourcePath);
        String outPath = null;
        // Blank uniqueCode means the resource is shared across all jobs.
        String basePath = StrUtil.isBlank(uniqueCode) ? "common" : uniqueCode;
        String prefixPath = StrUtil.isNotBlank(prefix) ? basePath + File.separator + prefix : basePath;
        String pathAndName = prefixPath + File.separator + name;
        if (StrUtil.isNotBlank(audioPath)) {
            String baseAudioDir = fFmpegProperties.getBaseAudioDir();
            outPath = baseAudioDir + File.separator + pathAndName;
        }
        if (StrUtil.isNotBlank(videoPath)) {
            String baseVideoDir = fFmpegProperties.getBaseVideoDir();
            outPath = baseVideoDir + File.separator + pathAndName;
        }
        ParamVerifyUtil.stringIsBlack(outPath, "音频或视频路径不能全部为空");

        // Cache hit — nothing to download.
        if (FileUtil.exist(outPath)) {
            return outPath;
        }

        //  下载到本地路径中 (download remote resources into the local cache)
        if (HttpUtil.isHttp(sourcePath) || HttpUtil.isHttps(sourcePath)) {
            try {
                FileUtil.mkParentDirs(outPath);
                HttpUtil.downloadFile(sourcePath, outPath);
            } catch (Exception e) {
                log.error("服务器资源【{}】获取失败", sourcePath, e);
                // FIX: preserve the cause so the original failure isn't lost.
                throw new RuntimeException("下载服务器资源失败", e);
            }
        }
        return Optional.ofNullable(outPath)
                .filter(FileUtil::exist)
                .orElseThrow(() -> new IllegalArgumentException("基础音频或视频下载失败"));
    }

    // ---------------------------------------正文_时长--------------------------------------------------

    /**
     * 正文视频合成 - 时长 — builds the body clip for a time-based exercise.
     * Loops the source clip to cover the requested duration, overlays an
     * elapsed-time watermark, then muxes in the background audio.
     *
     * @param videoPath  视频路径 — source clip
     * @param time       总时长 （秒） — total duration in seconds
     * @param uniqueCode 视频唯一编码 — job identifier
     * @return 视频路径 — local path of the body clip
     * @throws IllegalArgumentException when any step fails
     */
    public String trainingContentWithTime(String videoPath, int time, String uniqueCode) {
        String dirPrefix = getDirPrefix(uniqueCode, false);
        // .ts container for intermediate segments
        String videoSuffix = StrUtil.DOT + "ts";
        // Working directory for the time-based body clip.
        String timeVideoDir = dirPrefix + VIDEO + File.separator + "time" + File.separator;
        FileUtil.mkdir(timeVideoDir);

        // 1 Loop/cut the clip so it lasts `time` seconds (videoLoop wants milliseconds).
        String videoLoopPath = timeVideoDir + "loop" + videoSuffix;
        boolean videoLoop = videoLoop(uniqueCode, videoPath, time * 1000L, videoLoopPath);
        ParamVerifyUtil.isFalse(videoLoop, "正文_时长视频循环截取失败");

        FFmpegLocalFileContext.set(uniqueCode, videoLoopPath);

        // 2 Overlay the running elapsed-time watermark.
        String targetVideoPath = timeVideoDir + "watermark" + videoSuffix;

        // drawtext expression "%{eif\\:t\\:u}" renders the elapsed seconds, shown as "elapsed/total秒".
        boolean doWatermarkVideo = doWatermarkVideo(videoLoopPath, targetVideoPath, "%{eif\\:t\\:u}/" + time + SECOND);
        ParamVerifyUtil.isFalse(doWatermarkVideo, "正文_时长视频水印添加失败");
        FFmpegLocalFileContext.set(uniqueCode, targetVideoPath);
        // 3 Mux the watermarked video with the configured background audio.
        String backgroundAudio = this.obtainBaseAudio(fFmpegProperties.getBackgroundAudio());
        String completedVideoPath = timeVideoDir + uniqueCode + videoSuffix;
        boolean mergeAudioToVideo = FFmpegUtil.mergeAudioToVideo0(targetVideoPath, backgroundAudio, completedVideoPath);
        if (mergeAudioToVideo) {
            return completedVideoPath;
        }
        throw new IllegalArgumentException("正文_时长视频生成失败");
    }


    // ---------------------------------------正文_次数--------------------------------------------------

    /**
     * 正文视频合成 - 次数
     *
     * @param videoPath  视频路径
     * @param number     次数
     * @param uniqueCode 视频唯一编码
     * @return 视频路径
     */
    public String trainingContentWithNumber(String videoPath, int number, String uniqueCode) {

        String dirPrefix = getDirPrefix(uniqueCode, false);
        // audio
        String audioSuffix = StrUtil.DOT + fFmpegProperties.getBaseAudioSuffix();
        String videoSuffix = StrUtil.DOT + FileUtil.getSuffix(videoPath);
        // 合成音频
        String numberAudioDir = dirPrefix + AUDIO + File.separator + NUMBER_STR + File.separator;
        FileUtil.mkdir(numberAudioDir);
        String numberVideoDir = dirPrefix + VIDEO + File.separator + NUMBER_STR + File.separator;
        FileUtil.mkdir(numberVideoDir);

        boolean hasAudio = FFmpegUtil.hasAudio(videoPath);
        List<String> allVideo = new ArrayList<>();

        String videoWatermarkPath = numberVideoDir + "watermark";
        for (int i = 1; i <= number; i++) {
            // 1 处理合并单次动作音频
            List<String> audios = new ArrayList<>();
            frequencyAudio(i, audios::add);
            // 120s 空音频
            audios.add(obtainEmptyAudio("120"));
            String currentAudioPath = numberAudioDir + i + audioSuffix;
            boolean concatAudio = FFmpegUtil.concatAudio0(audios, currentAudioPath);
            ParamVerifyUtil.isFalse(concatAudio, "正文_次数" + i + "音频合成失败");
            // 2 单次动作音频与视频合并
            String currentVideoPath = numberVideoDir + i + videoSuffix;
            boolean mergeAudioToVideo;
            if (hasAudio) {
                mergeAudioToVideo = FFmpegUtil.mergeAudioToVideo(videoPath, currentAudioPath, currentVideoPath);
            } else {
                mergeAudioToVideo = FFmpegUtil.mergeAudioToVideo0(videoPath, currentAudioPath, currentVideoPath);
            }
            ParamVerifyUtil.isFalse(mergeAudioToVideo, "正文_次数" + i + "音频与视频合成失败");
            // 3 处理单次动作的水印
            String targetVideoPath = videoWatermarkPath + File.separator + i + videoSuffix;
            FileUtil.mkParentDirs(targetVideoPath);
            boolean watermarkVideo = doWatermarkVideo(currentVideoPath, targetVideoPath, i + "/" + number + CI);
            if (watermarkVideo) {
                allVideo.add(targetVideoPath);
            }
        }
        FFmpegLocalFileContext.set(uniqueCode, numberAudioDir);
        FFmpegLocalFileContext.set(uniqueCode, videoWatermarkPath);

        String completedVideoPath = numberVideoDir + uniqueCode + videoSuffix;
        boolean mergeVideoWithTxt = FFmpegUtil.mergeVideoWithTxt(allVideo, completedVideoPath, NUMBER_STR + File.separator);
        if (mergeVideoWithTxt) {
            return completedVideoPath;
        }
        throw new IllegalArgumentException("正文_次数视频合成失败");
    }

    // Convenience overload: all content fragments are merged into a single watermark line.
    private boolean doWatermarkVideo(String sourcePath, String targetPath, String... contents) {
        return this.doWatermarkVideo(sourcePath, targetPath, true, contents);
    }

    /**
     * Draws one or more text watermarks on a video using the configured font.
     *
     * @param sourcePath input video
     * @param targetPath output video
     * @param isMerge    true: join all contents into a single line; false: one
     *                   line per non-blank content, each 35px below the slot of
     *                   the previous entry (blank entries still consume a slot)
     * @param contents   watermark text fragments
     * @return true when the watermark command succeeded
     */
    private boolean doWatermarkVideo(String sourcePath, String targetPath, boolean isMerge, String... contents) {
        FFmpegFontProperties font = fFmpegProperties.getFont();
        Integer fontSize = font.getSize();
        Integer xLocation = font.getXLocation();
        Integer yLocation = font.getYLocation();
        String fontColor = font.getColor();

        WatermarkInfoDTO watermarkInfoDTO = new WatermarkInfoDTO();
        watermarkInfoDTO.setSourcePath(sourcePath);
        watermarkInfoDTO.setTargetPath(targetPath);

        List<WatermarkInfoDTO.FontInfo> fontInfos = new ArrayList<>();
        if (isMerge) {
            // Single line containing every fragment concatenated.
            WatermarkInfoDTO.FontInfo line = new WatermarkInfoDTO.FontInfo();
            line.setFontSize(fontSize.toString());
            line.setXLocation(xLocation.toString());
            line.setYLocation(yLocation.toString());
            line.setFontColor(fontColor);
            line.setContent(String.join("", contents));
            fontInfos.add(line);
        } else {
            for (int idx = 0; idx < contents.length; idx++) {
                if (StrUtil.isBlank(contents[idx])) {
                    continue;
                }
                WatermarkInfoDTO.FontInfo line = new WatermarkInfoDTO.FontInfo();
                line.setFontSize(fontSize.toString());
                line.setXLocation(xLocation.toString());
                // Offset depends on the fragment's position in the array, so a
                // blank fragment leaves a visual gap rather than shifting lines up.
                line.setYLocation(Integer.toString(yLocation + 35 * idx));
                line.setFontColor(fontColor);
                line.setContent(contents[idx]);
                fontInfos.add(line);
            }
        }
        watermarkInfoDTO.setFontInfos(fontInfos);
        return FFmpegUtil.watermarkVideo(watermarkInfoDTO);
    }

    /**
     * Callback hook invoked after the final video has been assembled.
     * Currently a no-op; implement to notify callers or persist the result.
     *
     * @param uniqueCode         视频唯一标识 — unique video identifier
     * @param completedVideoPath 本地视频路径 — local path of the finished video
     */
    private void doCallback(String uniqueCode, String completedVideoPath) {
        // Intentionally empty: no downstream notification is wired up yet.
    }

    /**
     * Pulls the required beans from the Spring context (instead of field
     * injection) and propagates the configured ffmpeg executable path to the
     * static utility holder.
     */
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.fFmpegProperties = applicationContext.getBean(FFmpegProperties.class);
        // Override the global ffmpeg binary location only when explicitly configured.
        if (StrUtil.isNotBlank(fFmpegProperties.getPath())) {
            FFmpegUtil.FFMPEG_PATH = fFmpegProperties.getPath();
        }
        this.redissonClient = applicationContext.getBean(RedissonClient.class);
    }
}
