const { FFMPEG_PATH } = require("./ffmpeg");
const { loadConfig } = require("./config");
const transitionEffects = require("./transitionEffects");
// Video encoder taken from config at module load; defaults to NVIDIA NVENC H.264.
// NOTE(review): read once at require-time — later config changes won't be picked up.
const encodeFormat = loadConfig().encodeFormat || "h264_nvenc";
/**
 * Read the target video dimensions from the config file.
 * @returns {Object} Object with `width` and `height` properties;
 *   falls back to 1920x1080 if the config cannot be loaded.
 */
function getVideoDimensions() {
  const fallback = { width: 1920, height: 1080 };
  try {
    const { projectSettings } = loadConfig();
    return {
      width: projectSettings.videoWidth || fallback.width,
      height: projectSettings.videoHeight || fallback.height,
    };
  } catch (error) {
    console.warn("无法加载配置文件，使用默认尺寸:", error.message);
    return fallback;
  }
}

/**
 * Build the FFmpeg filter string that fits an image/video into the target
 * frame: scale down preserving aspect ratio, center-crop any overflow,
 * then letterbox with black padding to the exact target size.
 * @returns {string} FFmpeg filter string
 */
function generateScaleFilter() {
  const { width, height } = getVideoDimensions();
  const fit = `scale=w=${width}:h=${height}:force_original_aspect_ratio=decrease`;
  const centerCrop = `crop=w=min(${width}\\,iw):h=min(${height}\\,ih):x=(iw-min(${width}\\,iw))/2:y=(ih-min(${height}\\,ih))/2`;
  const letterbox = `pad=w=${width}:h=${height}:x=(ow-iw)/2:y=(oh-ih)/2:color=black`;
  return [fit, centerCrop, letterbox].join(",");
}

/**
 * Generate a command that turns a still image into a video clip.
 * @param {string} imagePath - Image path
 * @param {string} outputPath - Output video path
 * @param {number} duration - Duration in seconds
 * @returns {string} FFmpeg command
 */
function generateImageToVideoCommand(imagePath, outputPath, duration) {
  const args = [
    `"${FFMPEG_PATH}"`,
    "-y",
    "-loglevel quiet",
    "-loop 1",
    `-i "${imagePath}"`,
    `-vf "${generateScaleFilter()}"`,
    `-c:v ${encodeFormat}`,
    `-t ${duration}`,
    "-pix_fmt yuv420p",
    "-r 30",
    `"${outputPath}"`,
  ];
  return args.join(" ");
}

/**
 * Generate a command that trims a video to a sub-range (audio dropped).
 * @param {string} videoPath - Video path
 * @param {string} outputPath - Output video path
 * @param {number} startTime - Start time in seconds
 * @param {number} duration - Duration in seconds
 * @returns {string} FFmpeg command
 */
function generateTrimVideoCommand(videoPath, outputPath, startTime, duration) {
  const filter = generateScaleFilter();
  const input = `-ss ${startTime} -i "${videoPath}" -t ${duration}`;
  // -an drops the audio track entirely.
  const encode = `-c:v ${encodeFormat} -preset medium -crf 23 -pix_fmt yuv420p -an`;
  return `"${FFMPEG_PATH}" -y -loglevel quiet ${input} -vf "${filter}" ${encode} "${outputPath}"`;
}

/**
 * Generate a command that loops a video until it covers the target
 * duration, then cuts at exactly `duration` seconds.
 *
 * Fix: added `-stream_loop -1`. Without it the input was merely trimmed,
 * so a source shorter than `duration` produced a short output instead of
 * looping, despite this function's stated purpose.
 *
 * @param {string} videoPath - Video path
 * @param {string} outputPath - Output video path
 * @param {number} duration - Target duration in seconds
 * @returns {string} FFmpeg command
 */
function generateLoopVideoCommand(videoPath, outputPath, duration) {
  const scaleFilter = generateScaleFilter();
  // -stream_loop must precede its -i; -t then caps the (infinite) loop.
  return `"${FFMPEG_PATH}" -y -loglevel quiet -stream_loop -1 -i "${videoPath}" -t ${duration} -vf "${scaleFilter}" -c:v ${encodeFormat} -preset medium -crf 23 -pix_fmt yuv420p "${outputPath}"`;
}

/**
 * Generate a command that resizes an image to the configured frame size.
 * @param {string} imagePath - Input image path
 * @param {string} outputPath - Output image path
 * @returns {string} FFmpeg command
 */
function generateResizeImageCommand(imagePath, outputPath) {
  const filter = generateScaleFilter();
  return [
    `"${FFMPEG_PATH}"`,
    "-y -loglevel quiet",
    `-i "${imagePath}"`,
    `-vf "${filter}"`,
    `"${outputPath}"`,
  ].join(" ");
}

/**
 * Generate a command that creates a solid-color placeholder video.
 * @param {string} outputPath - Output video path
 * @param {number} duration - Duration in seconds
 * @param {string} color - Color name (defaults to blue)
 * @returns {string} FFmpeg command
 */
function generatePlaceholderVideoCommand(outputPath, duration, color = "blue") {
  const dims = getVideoDimensions();
  // lavfi "color" source synthesizes the frames — no input file needed.
  const source = `color=c=${color}:s=${dims.width}x${dims.height}:d=${duration}`;
  return `"${FFMPEG_PATH}" -y -loglevel quiet -f lavfi -i ${source} -c:v ${encodeFormat} -pix_fmt yuv420p -r 30 "${outputPath}"`;
}

/**
 * Generate a command that reports a media file's duration as JSON.
 * @param {string} videoPath - Video path
 * @returns {string} FFmpeg command
 */
// NOTE(review): `-show_entries format=duration -of json` are ffprobe options,
// not ffmpeg ones — running them through FFMPEG_PATH will likely fail unless
// that path actually points at ffprobe. Confirm, or introduce an FFPROBE_PATH.
// (generateFallbackVideoInfoCommand below appears to exist for exactly this case.)
function generateVideoInfoCommand(videoPath) {
  return `"${FFMPEG_PATH}" -v quiet -show_entries format=duration -of json "${videoPath}"`;
}

/**
 * Generate a fallback command that filters ffmpeg's banner output for the
 * "Duration" line.
 * @param {string} videoPath - Video path
 * @returns {string} FFmpeg command
 */
// NOTE(review): `findstr` makes this Windows/cmd-only; on POSIX shells this
// command fails (`grep` would be needed there). Confirm the deployment target.
function generateFallbackVideoInfoCommand(videoPath) {
  return `"${FFMPEG_PATH}" -i "${videoPath}" 2>&1 | findstr "Duration"`;
}

/**
 * Generate a video merge command with crossfade (xfade) transitions.
 *
 * Parses an ffmpeg concat list file whose non-empty lines alternate between
 * `file '...'` and `duration N` entries, validates every referenced file,
 * then builds a filter_complex graph that:
 *   1. scales each input to the configured size and normalizes SAR/fps/format;
 *   2. extends each clip (except the last) with tpad to compensate for the
 *      time consumed by the transition overlap;
 *   3. chains randomly chosen xfade transitions between consecutive clips.
 * When `keepAudio` is set, each input's audio is delayed to its video offset
 * and the tracks are amix-ed into a single output stream.
 *
 * @param {string} concatFilePath - Path to the concat list file
 * @param {string} outputPath - Output video path
 * @param {boolean} keepAudio - Whether to keep and mix the inputs' audio
 * @returns {string} FFmpeg command
 * @throws {Error} If a listed file is missing, a duration is invalid, or the
 *   file/duration counts do not match.
 */
function generateConcatVideoCommand(concatFilePath, outputPath, keepAudio = false) {
  const fs = require("fs");
  const path = require("path");
  const concatContent = fs.readFileSync(concatFilePath, "utf8");
  const lines = concatContent.split("\n").filter(line => line.trim() !== "");
  const concatFileDir = path.dirname(concatFilePath);

  // Extract file paths and durations (lines come in file/duration pairs).
  // NOTE(review): the `file '...'` regex match is not null-checked — a
  // malformed line would throw a TypeError rather than a descriptive error.
  const files = [];
  const durations = [];
  for (let i = 0; i < lines.length; i += 2) {
    if (lines[i].startsWith("file ")) {
      const filePath = lines[i].match(/file '(.+)'$/)[1];
      const fullPath = `${concatFileDir}/${filePath}`;
      // Fail fast if a referenced input does not exist on disk.
      if (!fs.existsSync(fullPath)) {
        throw new Error(`Input file does not exist: ${fullPath}`);
      }
      files.push(fullPath);
      if (lines[i + 1] && lines[i + 1].startsWith("duration ")) {
        const duration = parseFloat(lines[i + 1].match(/duration ([\d.]+)/)[1]);
        if (isNaN(duration) || duration <= 0) {
          throw new Error(`Invalid duration for file ${filePath}: ${duration}`);
        }
        durations.push(duration);
      }
    }
  }

  // Every file must have a matching duration entry.
  if (files.length !== durations.length) {
    throw new Error(`Mismatch between files (${files.length}) and durations (${durations.length})`);
  }

  // One -i argument per input file.
  const inputParams = files.map(file => `-i "${file}"`).join(" ");
  const config = loadConfig();

  // Per-input preprocessing: scale, normalize SAR, fps and pixel format so
  // xfade gets streams with identical parameters.
  const preprocessedStreams = files.map((_, index) => {
    return `[${index}:v]scale=${config.projectSettings.videoWidth}:${config.projectSettings.videoHeight},setsar=1/1,fps=30,format=yuv420p[v${index}]`;
  });

  // Transition filter chain, built up incrementally below.
  let filterChain = [...preprocessedStreams];

  // Per-transition durations (index k = transition between clip k and k+1).
  const transDurations = [];

  // Each transition lasts at most 1s, and never longer than the shorter of
  // the two adjacent clips.
  for (let i = 1; i < files.length; i++) {
    const transDur = Math.min(1, Math.min(durations[i - 1], durations[i]));
    transDurations.push(transDur);
  }

  // Extend every clip except the last with tpad (cloning the final frame)
  // so the xfade overlap does not eat into the clip's visible duration.
  const paddedStreams = [];
  for (let i = 0; i < files.length - 1; i++) {
    const paddedLabel = `[v${i}_padded]`;
    filterChain.push(`[v${i}]tpad=stop_mode=clone:stop_duration=${transDurations[i]}${paddedLabel}`);
    paddedStreams.push(paddedLabel);
  }
  // The last clip needs no tpad — nothing fades in after it.
  paddedStreams.push(`[v${files.length - 1}]`);

  // Apply the xfade transitions pairwise, left to right.
  let prevStream = paddedStreams[0]; // start from the first padded stream
  let cumDur = 0; // cumulative original duration, used for offset math

  let filterChainAudio = [];
  let mergedAudioInfo = [];
  for (let i = 1; i < files.length; i++) {
    // Offset where clip i starts fading in: sum of all previous original
    // durations minus the previous transition (which overlaps).
    const offset = cumDur + durations[i - 1] - transDurations[i - 1];

    // Pick a random transition effect (overridable via EFFECT env var).
    let randomEffect = transitionEffects[Math.floor(Math.random() * transitionEffects.length)];
    if(process.env.EFFECT) randomEffect = process.env.EFFECT;
    // if(!config.transitionEffects){
    //   randomEffect = "fade";
    // }
    console.log("  Using transition effect:", randomEffect);
    const outputLabel = `[out${i}]`;
    filterChain.push(`${prevStream}${paddedStreams[i]}xfade=transition=${randomEffect}:duration=${transDurations[i - 1]}:offset=${offset}${outputLabel}`);
    prevStream = outputLabel;
    
    if(keepAudio){
      // adelay takes milliseconds, one value per channel (stereo assumed here).
      filterChainAudio.push(`[${i}:a]adelay=${offset*1000}|${offset*1000}[a${i}];`);
      mergedAudioInfo.push(`[a${i}]`);
    }

    // Advance the cumulative duration past clip i-1.
    cumDur += durations[i - 1];
  }

  // Assemble the complete video filter graph, ending in [vout].
  const finalFilterChain = filterChain.join(";");
  const finalFilter = `${finalFilterChain};${prevStream}format=yuv420p[vout]`;

  // Total output duration: sum of all original clip durations.
  const totalDuration = durations.reduce((sum, dur) => sum + dur, 0);

  // Debug logging: filter_complex and computed offsets.
  // console.log("Generated filter_complex:", finalFilter);
  // console.log(
  //   "Offsets:",
  //   filterChain.filter(f => f.includes("xfade")).map(f => f.match(/offset=([\d.]+)/)?.[1] || "N/A")
  // );
  // console.log("Total duration:", totalDuration);
  // Audio graph: input 0's audio gets zero delay, later inputs their offsets,
  // then everything is amix-ed. NOTE(review): with keepAudio and a single
  // input file, no audio is mapped at all — confirm that is intended.
  const audioFilter = filterChainAudio.length > 0 ? ";[0:a]adelay=0|0[a0];"+filterChainAudio.join("") : "";
  const audioMap = filterChainAudio.length > 0 ? `-map "[aout]" ` : "";
  const mergedAudioInfoString = mergedAudioInfo.length > 0 ? `[a0]${mergedAudioInfo.join("")}amix=inputs=${mergedAudioInfo.length+1}:duration=longest[aout]` : "";
  const audioEncode = filterChainAudio.length > 0 ? `-c:a aac -b:a 192k` : "";
  return `"${FFMPEG_PATH}" -y -loglevel error ${inputParams} -filter_complex "${finalFilter}${audioFilter}${mergedAudioInfoString}"  -map "[vout]" ${audioMap} -c:v ${encodeFormat} -preset medium -crf 23 -pix_fmt yuv420p -r 30 ${audioEncode} -t ${totalDuration} "${outputPath}"`;
}

/**
 * Generate the fallback merge command: plain concat demuxer plus burned-in
 * ASS subtitles (no transitions).
 * @param {string} concatFilePath - Path to the concat list file
 * @param {string} subtitleFile - Subtitle file path
 * @param {string} outputPath - Output video path
 * @returns {string} FFmpeg command
 */
function generateFallbackConcatVideoCommand(concatFilePath, subtitleFile, outputPath) {
  // The ass filter needs forward slashes and an escaped drive colon.
  const normalized = subtitleFile.replace(/\\/g, "/").replace(/:/g, "\\:");
  const subtitleFilter = `ass='${normalized}'`;
  return [
    `"${FFMPEG_PATH}" -y -loglevel error`,
    `-f concat -safe 0 -i "${concatFilePath}"`,
    `-vf "${subtitleFilter}"`,
    `-c:v ${encodeFormat} -preset medium -crf 23 -pix_fmt yuv420p -r 30`,
    "-max_muxing_queue_size 9999",
    `"${outputPath}"`,
  ].join(" ");
}

/**
 * Generate a command that muxes an audio track onto a video (video copied,
 * audio re-encoded to AAC). If a target duration is given, the audio is
 * trimmed and silence-padded to exactly that length.
 * @param {string} videoPath - Video file path
 * @param {string} audioFile - Audio file path
 * @param {string} outputPath - Output file path
 * @param {number|null} targetDuration - Target duration in seconds
 * @returns {string} FFmpeg command
 */
function generateMergeAudioVideoCommand(videoPath, audioFile, outputPath, targetDuration = null) {
  const head = `"${FFMPEG_PATH}" -y -loglevel quiet -i "${videoPath}" -i "${audioFile}"`;
  const tail = `-c:v copy -c:a aac -strict -1 "${outputPath}"`;
  if (!(targetDuration && targetDuration > 0)) {
    return `${head} ${tail}`;
  }
  // Trim the audio to the target length, then pad with silence up to it.
  const audioFilters = `atrim=0:${targetDuration},apad=whole_dur=${targetDuration}`;
  return `${head} -af "${audioFilters}" ${tail}`;
}

/**
 * Generate a command that mixes an intro audio track into a video's
 * existing audio (intro at 0.8 volume, original at full volume).
 * @param {string} videoPath - Video file path
 * @param {string} startAudioFile - Intro audio file path
 * @param {string} outputPath - Output file path
 * @returns {string} FFmpeg command
 */
function generateMergeStartAudioCommand(videoPath, startAudioFile, outputPath) {
  // amix with duration=first keeps the original track's length.
  const mixGraph = [
    "[0:a]volume=1.0[a1]",
    "[1:a]volume=0.8[a2]",
    "[a1][a2]amix=inputs=2:duration=first",
  ].join(";");
  return `"${FFMPEG_PATH}" -y -loglevel quiet -i "${videoPath}" -i "${startAudioFile}" -filter_complex "${mixGraph}" -c:v copy -c:a aac -b:a 192k "${outputPath}"`;
}

/**
 * Generate a command that mixes two audio files into one MP3, trimmed to
 * the target duration.
 * @param {string} startAudioFile - Intro audio file path
 * @param {string} mainAudioFile - Main audio file path
 * @param {string} outputPath - Output file path
 * @param {number} targetDuration - Target duration in seconds
 * @returns {string} FFmpeg command
 */
function generateMergeTwoAudioCommand(startAudioFile, mainAudioFile, outputPath, targetDuration) {
  // Mix both tracks, then cut the result at the target duration.
  const mixFilter = `[0:a][1:a]amix=inputs=2:duration=longest,atrim=0:${targetDuration}`;
  const inputs = `-i "${startAudioFile}" -i "${mainAudioFile}"`;
  return `"${FFMPEG_PATH}" -y -loglevel quiet ${inputs} -filter_complex "${mixFilter}" -c:a libmp3lame -q:a 4 "${outputPath}"`;
}

/**
 * Generate a command that burns an ASS subtitle file into a video.
 *
 * Fix: when `subtitleFile` was empty the original emitted `-vf ""`, which
 * ffmpeg rejects ("Empty filtergraph"); the -vf option is now omitted
 * entirely in that case.
 *
 * @param {string} videoPath - Video file path
 * @param {string} subtitleFile - Subtitle file path, or "" for none
 * @param {string} outputPath - Output file path
 * @returns {string} FFmpeg command
 */
function generateMergeSubtitleCommand(videoPath, subtitleFile, outputPath) {
  const head = `"${FFMPEG_PATH}" -y -loglevel quiet -i "${videoPath}"`;
  const tail = `-c:a copy "${outputPath}"`;
  if (subtitleFile.length === 0) {
    return `${head} ${tail}`;
  }
  // The ass filter needs forward slashes and an escaped drive colon.
  const subtitleFilter = `ass='${subtitleFile.replace(/\\/g, "/").replace(/:/g, "\\:")}'`;
  return `${head} -vf "${subtitleFilter}" ${tail}`;
}

/**
 * Merge video files in order via the concat filter (re-encodes video).
 *
 * Generalized: the original hard-coded exactly two inputs even though it
 * accepts an array; it now handles any number of paths. For two inputs the
 * generated command is byte-identical to the old one.
 *
 * @param {Array} videoPaths - Video file paths, in output order (>= 1)
 * @param {string} outputPath - Output file path
 * @returns {string} FFmpeg command
 */
function mergeVideosOrderedCommand(videoPaths, outputPath) {
  const inputArgs = videoPaths.map((p) => `-i "${p}"`).join(" ");
  // e.g. [0:v][1:v]concat=n=2:v=1:a=0[v] — video-only concat.
  const streamRefs = videoPaths.map((_, i) => `[${i}:v]`).join("");
  const graph = `${streamRefs}concat=n=${videoPaths.length}:v=1:a=0[v]`;
  return `"${FFMPEG_PATH}" -y -loglevel quiet ${inputArgs} -filter_complex "${graph}" -preset medium -c:v ${encodeFormat} -map "[v]" -c:a copy "${outputPath}"`;
}
/**
 * Merge video files in order via a concat list file.
 * @param {string} listFile - Path to the concat list (txt) file
 * @param {string} outputPath - Output file path
 * @param {any} transition - Truthy to apply crossfade transitions
 * @returns {string} FFmpeg command
 */
function mergeVideosOrderedByListCommand(listFile, outputPath, transition) {
  if (transition) {
    // Transitions require the full re-encoding xfade pipeline.
    return generateConcatVideoCommand(listFile, outputPath, true);
  }
  const prefix = `"${FFMPEG_PATH}" -y -loglevel quiet -f concat -safe 0 -i "${listFile}"`;
  if (process.env.FFLAGS === "true") {
    // Regenerate PTS to tolerate timestamp gaps between segments.
    return `${prefix} -vsync vfr -fflags +genpts -c copy "${outputPath}"`;
  }
  return `${prefix} -c:v copy -c:a copy "${outputPath}"`;
}
/**
 * Generate a command that turns a still image into a video, optionally
 * burning in ASS subtitles.
 *
 * Fix: when `subtitleFile` was empty the original emitted `-vf ""`, which
 * ffmpeg rejects ("Empty filtergraph"); the -vf option is now omitted
 * entirely in that case.
 *
 * @param {string} imagePath - Image path
 * @param {string} subtitleFile - Subtitle file path, or "" for none
 * @param {string} outputPath - Output file path
 * @param {number} duration - Video duration in seconds
 * @returns {string} FFmpeg command
 */
function generateStaticImageVideoCommand(imagePath, subtitleFile, outputPath, duration) {
  const head = `"${FFMPEG_PATH}" -y -loglevel quiet -loop 1 -i "${imagePath}"`;
  const tail = `-c:v ${encodeFormat} -preset medium -crf 23 -t ${duration} "${outputPath}"`;
  if (subtitleFile.length === 0) {
    return `${head} ${tail}`;
  }
  // The ass filter needs forward slashes and an escaped drive colon.
  const subtitleFilter = `ass='${subtitleFile.replace(/\\/g, "/").replace(/:/g, "\\:")}'`;
  return `${head} -vf "${subtitleFilter}" ${tail}`;
}

// Public API: all exported functions return FFmpeg command strings; none
// of them executes anything itself.
module.exports = {
  generateImageToVideoCommand,
  generateTrimVideoCommand,
  generateLoopVideoCommand,
  generateResizeImageCommand,
  generatePlaceholderVideoCommand,
  generateVideoInfoCommand,
  generateFallbackVideoInfoCommand,
  generateConcatVideoCommand,
  generateFallbackConcatVideoCommand,
  generateMergeAudioVideoCommand,
  generateMergeStartAudioCommand,
  generateMergeTwoAudioCommand,
  generateMergeSubtitleCommand,
  generateStaticImageVideoCommand,
  getVideoDimensions,
  mergeVideosOrderedCommand,
  mergeVideosOrderedByListCommand
};
