import * as mv from "mv";
import * as path from "path";
import * as fse from "fs-extra";
import * as express from "express";
import * as SparkMD5 from "spark-md5";
import * as multiparty from "multiparty";

import { debug } from "../utils/log";
import {
	createAudioLabelFromBuf,
	createVideoLabel,
	createVideoLabelFromFilename,
	insertCache,
} from "./../db/cache";
import { Message } from "../utils/message";
import { existFilename, existKey } from "../db/media";
import {
	genMP3StorePath,
	genTempChunkDir,
	safeRemoveDir,
	setCopyName,
} from "../utils/fs";
import { bufToArrayBuffer, chunkSort, md5FromBuffer } from "../utils/md5";

/**
 * Handles a single-shot audio upload: parses the multipart request,
 * rejects duplicates by content hash, and moves the temp file into the
 * MP3 store (renaming on filename collision).
 */
export function routerAudioUpload(req: express.Request, res: express.Response) {
	const form = new multiparty.Form();
	form.parse(req, async function (error, _fields, files) {
		// Guard BEFORE touching the temp file: the previous version read
		// inputFile.path and files?.file[0] unconditionally, which crashed
		// when parsing failed or the "file" field was absent.
		const inputFile = files?.file?.[0];

		if (error || !inputFile) {
			debug({
				error,
				url: "/upload",
				code: "UPLOAD_FAILED",
			});

			return res.send(
				Message.err("UPLOAD_FAILED", 502, {
					info: "UPLOAD_FAILED",
					error,
				})
			);
		}

		let newPath = genMP3StorePath(inputFile.originalFilename);

		// Deduplicate by content: reject the upload if this MD5 is already known.
		const key = md5FromBuffer(fse.readFileSync(inputFile.path));
		const keyExist = await existKey(key);

		if (keyExist > 0) {
			debug({
				url: "/upload",
				code: "UPLOAD_FILE_EXISTED",
			});

			return res.send(
				Message.err("UPLOAD_FAILED", 502, {
					info: "UPLOAD_FILE_EXISTED",
				})
			);
		}

		// Same content hash not found, but the target filename may clash;
		// pick a "copy" name rather than overwriting.
		const fileExist = await existFilename(newPath);

		if (fileExist > 0) {
			newPath = setCopyName(newPath);
		}

		handleMove(error, inputFile.path, newPath, res);
	});
}

/**
 * Safely reads a single string value out of a multiparty fields object.
 *
 * multiparty delivers each form field as an array of strings; this helper
 * unwraps the first entry.
 *
 * @param fields fields object produced by multiparty's form.parse
 * @param key field name to read
 * @returns the first string value for the key, or "" when the field is
 *          absent, not an array, or not a string
 */
function safeGetField(fields: unknown, key: string): string {
	// Tolerate a null/undefined fields object — the previous version
	// threw on `fields[key]` in that case.
	if (fields == null || typeof fields !== "object") {
		return "";
	}

	const value = (fields as Record<string, unknown>)[key];

	if (Array.isArray(value) && typeof value[0] === "string") {
		return value[0];
	}

	return "";
}

/**
 * Moves an uploaded temp file to its destination and writes the HTTP
 * response for the upload route.
 *
 * @param error parse error forwarded from multiparty, if any
 * @param inputFilePath temp path where the upload was parsed to
 * @param newPath destination path for the file
 * @param res Express response the outcome is written to
 * @param upt client-reported upload count (string form field), optional
 */
function handleMove(
	error: any,
	inputFilePath: string,
	newPath: string,
	res: express.Response,
	upt?: string
) {
	// Fail fast on a parse error. The previous version only checked it
	// inside the mv callback, so the file was moved even when parsing
	// had already failed.
	if (error) {
		debug({
			error,
			url: "/upload",
			code: "UPLOAD_FAILED",
		});

		return res.send(
			Message.err("UPLOAD_FAILED", 502, {
				info: "UPLOAD_FAILED",
				error: error,
			})
		);
	}

	mv(inputFilePath, newPath, (mvErr) => {
		if (mvErr) {
			debug({
				url: "/upload",
				code: "UPLOAD_FAILED",
				error: mvErr,
			});

			return res.send(
				Message.err("UPLOAD_FAILED", 502, {
					info: "UPLOAD_FAILED",
					error: mvErr,
				})
			);
		}

		// Number(undefined) is NaN — treat a missing/blank field as 0 so the
		// first upload reports uploadTimes: 1 instead of NaN.
		const uploadTimes = (Number(upt) || 0) + 1;

		debug({
			url: "/upload",
			code: "UPLOAD_SUCCESS",
			uploadTimes,
		});

		return res.send(
			Message.success({
				info: "UPLOAD_SUCCESS",
				uploadTimes,
			})
		);
	});
}

export function routerUploadChunks(
	req: express.Request,
	res: express.Response
) {
	const form = new multiparty.Form();

	try {
		form.parse(req, async (error, fields, files) => {
			const inputFile = files?.file[0];
			const uid = safeGetField(fields, "uid");
			const upt = safeGetField(fields, "uploadTimes");

			const chunkDir = genTempChunkDir(inputFile?.originalFilename, uid);

			fse.ensureDirSync(chunkDir);

			const newPath = path.resolve(chunkDir, inputFile?.originalFilename);

			handleMove(error, inputFile.path, newPath, res, upt);
		});
	} catch (error) {
		return res.send(
			Message.success({
				error,
				info: "UPLOAD_CHUNKS_ERR",
			})
		);
	}
}

/**
 * Merges previously uploaded chunks into one file: concatenates them in
 * upload order, hashes the result for dedup, records a label in the cache
 * DB, moves the merged file into the MP3 store, and removes the temp
 * chunk directory.
 */
export async function routerMergeChunks(
	req: express.Request,
	res: express.Response
) {
	const form = new multiparty.Form();

	form.parse(req, async (error, fields, _files) => {
		// The try/catch lives INSIDE the async callback. Wrapping
		// form.parse itself (as before) never catches errors thrown
		// asynchronously, so merge failures escaped unhandled.
		try {
			// Parse errors were previously discarded (_error); surface them
			// with the same payload shape the outer catch used.
			if (error) {
				return res.send(
					Message.success({
						error,
						info: "MERGE_CHUNKS_ERR",
					})
				);
			}

			let endBuf = Buffer.alloc(0);
			const spark = new SparkMD5.ArrayBuffer();

			const uid = safeGetField(fields, "uid");
			let name = safeGetField(fields, "name");
			const mediatype = safeGetField(fields, "mediatype");

			const chunkDir = genTempChunkDir(name, uid);
			const newPath = path.resolve(chunkDir, name);

			// Stitch the chunks together in upload order, hashing as we go.
			// The directory listing is snapshotted before any append, so the
			// merged file created below is not re-read as a chunk.
			fse
				.readdirSync(chunkDir)
				.sort(chunkSort)
				.forEach((chunkPath) => {
					const chunkFile = chunkDir + "/" + chunkPath;
					const chunkBuf = fse.readFileSync(chunkFile);
					endBuf = Buffer.concat([endBuf, chunkBuf]);
					fse.appendFileSync(newPath, chunkBuf);

					debug({ chunkFile, chunkSize: chunkBuf.length });

					spark.append(bufToArrayBuffer(chunkBuf));
				});

			debug({ uid, name });

			// Deduplicate by the MD5 of the merged content.
			const endKey = spark.end();
			const keyExist = await existKey(endKey);

			debug({ keyExist, key: endKey });

			if (keyExist > 0) {
				debug({
					url: "/upload",
					code: "UPLOAD_FILE_EXISTED",
				});

				safeRemoveDir(chunkDir);

				return res.send(
					Message.err("UPLOAD_FAILED", 502, {
						info: "UPLOAD_FILE_EXISTED",
					})
				);
			}

			// NOTE(review): the single-file route checks existFilename on the
			// full store path, while this checks the bare name — confirm
			// which form the DB stores.
			const fileExist = await existFilename(name);

			if (fileExist > 0) {
				name = setCopyName(name);
			}

			const label =
				mediatype === "AUDIO"
					? createAudioLabelFromBuf(endBuf)
					: createVideoLabelFromFilename(name);

			const storePath = genMP3StorePath(name);

			const state = await insertCache({
				key: endKey,
				label,
				tags: [],
			});

			debug({ insertToCache: state });

			// Move the merged file into the store, then drop the temp dir.
			fse.renameSync(newPath, storePath);

			safeRemoveDir(chunkDir);

			return res.send(
				Message.success({
					info: "MERGE_CHUNKS_SUCCESS",
				})
			);
		} catch (err) {
			return res.send(
				Message.success({
					error: err,
					info: "MERGE_CHUNKS_ERR",
				})
			);
		}
	});
}
