import { FFFSType, FFmpeg } from "@ffmpeg/ffmpeg";
import { joinPath } from "ente-base/file-name";
import { newID } from "ente-base/id";
import log from "ente-base/log";
import type { FFmpegCommand } from "ente-base/types/ipc";
import { PromiseQueue } from "ente-utils/promise";
import { z } from "zod";
import {
    ffmpegPathPlaceholder,
    inputPathPlaceholder,
    outputPathPlaceholder,
} from "./constants";

/** Lazily initialized and loaded FFmpeg instance. */
let _ffmpeg: Promise<FFmpeg> | undefined;

/**
 * Queue of in-flight requests.
 *
 * Shared by all entry points in this module so that at most one ffmpeg /
 * ffprobe invocation runs at a time (interleaved execs crash the Wasm module;
 * see the note in {@link ffmpegExecWeb}). The element type is the union of
 * the results the queued tasks can produce: output bytes or a duration.
 */
const _ffmpegTaskQueue = new PromiseQueue<Uint8Array | number>();

/**
 * Return the shared {@link FFmpeg} instance, creating and loading it on first
 * use. Subsequent calls reuse the same (pending or resolved) promise.
 */
const ffmpegLazy = (): Promise<FFmpeg> => {
    if (!_ffmpeg) _ffmpeg = createFFmpeg();
    return _ffmpeg;
};

const createFFmpeg = async () => {
    const ffmpeg = new FFmpeg();
    await ffmpeg.load({
        coreURL: "https://assets.ente.io/ffmpeg-core-0.12.10/ffmpeg-core.js",
        wasmURL: "https://assets.ente.io/ffmpeg-core-0.12.10/ffmpeg-core.wasm",
    });
    // This is too noisy to enable even during development. Uncomment to taste.
    // ffmpeg.on("log", (e) => log.debug(() => ["[ffmpeg]", e.message]));
    return ffmpeg;
};

/**
 * Run the given FFmpeg command using a Wasm FFmpeg running in a web worker.
 *
 * This is a sibling of {@link ffmpegExec} exposed by the desktop app in
 * `ipc.ts`. As a rough ballpark, currently the native FFmpeg integration in the
 * desktop app is 10-20x faster than the Wasm one.
 *
 * See: [Note: FFmpeg in Electron].
 *
 * @param command The FFmpeg command to execute.
 *
 * @param blob The input blob on which to run the command.
 *
 * @param outputFileExtension The extension of the (temporary) output file which
 * will be generated by the command.
 *
 * @returns The contents of the output file generated as a result of executing
 * {@link command} on {@link blob}.
 */
export const ffmpegExecWeb = async (
    command: FFmpegCommand,
    blob: Blob,
    outputFileExtension: string,
): Promise<Uint8Array> => {
    const ffmpeg = await ffmpegLazy();
    // Interleaving multiple ffmpeg.execs causes errors like
    //
    // >  "Out of bounds memory access (evaluating 'Module["_malloc"](len)')"
    //
    // so every task goes through the shared promise queue, one at a time.
    const task = () => ffmpegExec(ffmpeg, command, outputFileExtension, blob);
    return _ffmpegTaskQueue.add(task) as Promise<Uint8Array>;
};

/**
 * Determine the duration of the given video blob.
 *
 * This is a specialized variant of {@link ffmpegExecWeb} that uses the same
 * task queue but internally invokes ffprobe to try and determine the video's
 * duration.
 *
 * @param blob The input video, provided as a blob.
 *
 * @returns The duration of the {@link blob} (if it indeed is a video).
 */
export const determineVideoDurationWeb = async (
    blob: Blob,
): Promise<number> => {
    const ffmpeg = await ffmpegLazy();
    const task = () => ffprobeExecVideoDuration(ffmpeg, blob);
    return _ffmpegTaskQueue.add(task) as Promise<number>;
};

/**
 * Execute {@link command} against {@link blob} on the FFmpeg Wasm FS,
 * returning the bytes of the generated output file.
 *
 * The input is mounted (and unmounted) via {@link withInputMount}, and the
 * output file is deleted in all cases before returning.
 */
const ffmpegExec = async (
    ffmpeg: FFmpeg,
    command: FFmpegCommand,
    outputFileExtension: string,
    blob: Blob,
) => {
    const outputPath =
        newID("out_") + (outputFileExtension ? "." + outputFileExtension : "");

    // Exit status of the ffmpeg.exec invocation.
    // `0` if no error, `!= 0` if timeout (1) or error.
    let status: number | undefined;

    return withInputMount(ffmpeg, blob, async (inputPath) => {
        try {
            const startTime = Date.now();

            // A non-array command carries HDR and default variants; pick the
            // applicable one by probing the input.
            const resolvedCommand = Array.isArray(command)
                ? command
                : (await isHDRVideo(ffmpeg, inputPath))
                  ? command.hdr
                  : command.default;

            const cmd = substitutePlaceholders(
                resolvedCommand,
                inputPath,
                outputPath,
            );

            status = await ffmpeg.exec(cmd);
            if (status !== 0) {
                log.info(
                    `[wasm] ffmpeg command failed with exit code ${status}: ${cmd.join(" ")}`,
                );
                throw new Error(
                    `ffmpeg command failed with exit code ${status}`,
                );
            }

            const result = await ffmpeg.readFile(outputPath);
            // readFile returns a string when asked for text; we expect bytes.
            if (typeof result == "string")
                throw new Error("Expected binary data");

            const ms = Date.now() - startTime;
            log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`);
            return result;
        } finally {
            try {
                await ffmpeg.deleteFile(outputPath);
            } catch (e) {
                // Output file might not even exist if the command did not
                // succeed, so only log on success.
                if (status === 0) {
                    log.error(`Failed to remove output ${outputPath}`, e);
                }
            }
        }
    });
};

/**
 * Mount {@link blob} as a file on the FFmpeg Wasm FS, invoke {@link f} with
 * its path, then unmount and remove the mount directory in all cases.
 *
 * @returns Whatever {@link f} returns.
 */
const withInputMount = async <T>(
    ffmpeg: FFmpeg,
    blob: Blob,
    f: (inputPath: string) => Promise<T>,
): Promise<T> => {
    const mountDir = "/mount";
    const inputFileName = newID("in_");

    try {
        await ffmpeg.createDir(mountDir);
        // WORKERFS lets ffmpeg read the File without copying it into the
        // Wasm heap.
        await ffmpeg.mount(
            FFFSType.WORKERFS,
            { files: [new File([blob], inputFileName)] },
            mountDir,
        );

        return await f(joinPath(mountDir, inputFileName));
    } finally {
        try {
            await ffmpeg.unmount(mountDir);
        } catch (e) {
            log.error(`Failed to remove mount ${mountDir}`, e);
        }
        try {
            await ffmpeg.deleteDir(mountDir);
        } catch (e) {
            log.error(`Failed to delete mount directory ${mountDir}`, e);
        }
    }
};

/**
 * Replace the placeholder segments in {@link command} with concrete values:
 * the input and output paths are substituted, while the ffmpeg binary path
 * placeholder (meaningful only for the desktop app) is dropped entirely.
 */
const substitutePlaceholders = (
    command: string[],
    inputFilePath: string,
    outputFilePath: string,
) =>
    command.flatMap((segment) => {
        switch (segment) {
            case ffmpegPathPlaceholder:
                return [];
            case inputPathPlaceholder:
                return [inputFilePath];
            case outputPathPlaceholder:
                return [outputFilePath];
            default:
                return [segment];
        }
    });

/**
 * Zod schema for the subset of ffprobe's JSON output that {@link isHDRVideo}
 * inspects: per-stream `color_transfer` (absent for some streams/containers).
 */
const FFProbeOutputIsHDR = z.object({
    streams: z.array(z.object({ color_transfer: z.string().optional() })),
});

/**
 * A variant of the {@link isHDRVideo} function in the desktop app source (see
 * `ffmpeg.ts`), except here we have access to ffprobe and can use that instead
 * of parsing the ffmpeg stderr.
 *
 * See: [Note: Alternative FFmpeg command for HDR videos]
 *
 * @param inputFilePath The path to a video file on the FFmpeg FS.
 *
 * @returns `true` if this file is likely a HDR video. Exceptions are treated
 * as `false` to make this function safe to invoke without breaking the happy
 * path.
 */
const isHDRVideo = async (ffmpeg: FFmpeg, inputFilePath: string) => {
    let jsonString: string | undefined;
    try {
        const cmd = [
            "-i",
            inputFilePath,
            // Show information about streams.
            "-show_streams",
            // Select the first video stream. This is not necessarily correct
            // in a multi stream file because the ffmpeg automatic mapping will
            // use the highest resolution stream, but short of reinventing
            // ffmpeg's resolution mechanism, it is a reasonable assumption for
            // our current heuristic check.
            "-select_streams",
            "v:0",
            // Output JSON
            "-of",
            "json",
            "-o",
            "output.json",
        ];
        jsonString = await ffprobeOutput(ffmpeg, cmd, "output.json");

        const parsed = FFProbeOutputIsHDR.parse(JSON.parse(jsonString));
        const colorTransfer = parsed.streams[0]?.color_transfer;
        // These two transfer characteristic values are treated as indicating
        // HDR content.
        return colorTransfer == "smpte2084" || colorTransfer == "arib-std-b67";
    } catch (e) {
        log.warn("Could not detect HDR status", e);
        if (jsonString) log.debug(() => ["ffprobe-output", jsonString]);
        return false;
    }
};

/**
 * Return the textual output produced by verbatim invoking the given ffprobe
 * {@link cmd} that is expected to write to a file named {@link outputPath} in
 * the FFmpeg FS.
 *
 * The file generated at {@link outputPath} is removed in all cases.
 *
 * @param ffmpeg The loaded {@link FFmpeg} instance on which to run the
 * command.
 *
 * @param cmd The ffprobe command line segments, passed through verbatim.
 *
 * @param outputPath The path (on the FFmpeg FS) of the file that the command
 * writes its output to.
 *
 * @returns The contents of {@link outputPath}, read as UTF-8 text.
 */
const ffprobeOutput = async (
    ffmpeg: FFmpeg,
    cmd: string[],
    outputPath: string,
) => {
    // Exit status of the ffmpeg.ffprobe invocation.
    // `0` if no error, `!= 0` if timeout (1) or error.
    let status: number | undefined;

    // Currently, ffprobe incorrectly returns status -1 on success, so treat
    // both 0 and -1 as success.
    // https://github.com/ffmpegwasm/ffmpeg.wasm/issues/817
    const succeeded = () => status === 0 || status === -1;

    try {
        status = await ffmpeg.ffprobe(cmd);
        if (!succeeded()) {
            log.info(
                `[wasm] ffprobe command failed with exit code ${status}: ${cmd.join(" ")}`,
            );
            throw new Error(`ffprobe command failed with exit code ${status}`);
        }

        const result = await ffmpeg.readFile(outputPath, "utf8");
        if (typeof result != "string") throw new Error("Expected text data");

        return result;
    } finally {
        try {
            await ffmpeg.deleteFile(outputPath);
        } catch (e) {
            // Output file might not even exist if the command did not succeed,
            // so only log on success (matching ffmpegExec's cleanup behavior).
            // The original condition here was inverted and logged on failure.
            if (succeeded()) {
                log.error(`Failed to remove output ${outputPath}`, e);
            }
        }
    }
};

/**
 * Zod schema for the subset of ffprobe's JSON output consumed by
 * {@link ffprobeExecVideoDuration}: the container level `format.duration`,
 * which ffprobe reports as a string.
 */
const FFProbeOutputDuration = z.object({
    format: z.object({ duration: z.string() }),
});

/**
 * Determine the duration (in seconds) of the video in the given {@link blob}
 * using ffprobe, reading it from the container metadata.
 *
 * Throws if ffprobe fails or if its output does not contain a parseable
 * duration.
 */
const ffprobeExecVideoDuration = async (ffmpeg: FFmpeg, blob: Blob) =>
    withInputMount(ffmpeg, blob, async (inputPath) => {
        // Determine the video duration from the container, bypassing the
        // issues with stream selection.
        //
        //    ffprobe -v error -show_entries format=duration -of
        //    default=noprint_wrappers=1:nokey=1 input.mp4
        //
        // Source:
        // https://trac.ffmpeg.org/wiki/FFprobeTips#Formatcontainerduration
        //
        // Reference: https://ffmpeg.org/ffprobe.html
        //
        // Since we cannot grab the stdout easily, the command has been
        // modified to output to a file instead. However, in doing so the
        // command seems to have become flaky - for certain videos, it outputs
        // extra lines and not just the duration. So we also switch to the
        // JSON output for more robust behaviour, and parse the duration from
        // it.

        const cmd = [
            "-i",
            inputPath,
            "-v",
            "error",
            "-show_entries",
            "format=duration",
            "-of",
            "json",
            "-o",
            "output.json",
        ];
        const jsonString = await ffprobeOutput(ffmpeg, cmd, "output.json");

        const { format } = FFProbeOutputDuration.parse(JSON.parse(jsonString));

        const duration = parseFloat(format.duration);
        if (Number.isNaN(duration)) {
            const msg = "Could not parse video duration";
            log.warn(msg, format.duration);
            throw new Error(msg);
        }
        return duration;
    });
