const multiparty = require("multiparty");
const path = require("path");
const fse = require("fs-extra");
const fs = require("fs");
const { Utils, Result } = require("./utils");
const { UPLOAD_PATH, CHUNK_SIZE } = require("./config");


/**
 * Read and JSON-parse the request body.
 * @param {Object} req incoming request (a readable stream emitting data/end/error)
 * @return {Promise<Object>} parsed body, or {} for an empty body;
 *         rejects on a request stream error or malformed JSON
 */
const resolvePost = (req) => {
    return new Promise((resolve, reject) => {
        let body = "";
        req.on("data", (chunk) => (body += chunk));
        // BUGFIX: propagate stream errors instead of leaving the promise pending.
        req.on("error", reject);
        req.on("end", () => {
            try {
                // BUGFIX: JSON.parse used to throw inside the event handler
                // (uncatchable by callers); reject instead.
                resolve(body ? JSON.parse(body) : {});
            } catch (err) {
                reject(err);
            }
        });
    });
};

/**
 * Merge uploaded chunk files into one final file.
 * Chunks are expected at `${chunkPath}/${hash}-${index}` for index 0..chunkCount-1;
 * each chunk is deleted after it has been appended.
 * @param {String} fileName name of the merged file
 * @param {String} chunkPath directory containing the chunk files
 * @param {Number} chunkCount total number of chunks
 * @param {String} hash file hash used in the chunk file names
 * @param {String} fileDir directory (relative to UPLOAD_PATH) for the merged file
 * @return {Promise<Boolean>} resolves true on success; rejects on missing
 *         directory, empty directory, or any stream error
 */
const mergeChunkFile = ({fileName, chunkPath, chunkCount, hash, fileDir = "./"}) => {
    // The chunk directory must exist and must contain at least one chunk.
    if (!fse.existsSync(chunkPath)) {
        return Promise.reject(new Error('The chunk directory does not exist'));
    }
    if (fs.readdirSync(chunkPath).length === 0) {
        return Promise.reject(new Error('Chunk cannot be an empty directory'));
    }
    const dataPath = path.resolve(UPLOAD_PATH, fileDir, fileName); // final file location
    const writeStream = fs.createWriteStream(dataPath);
    let mergedChunkIndex = 0;
    return new Promise((resolve, reject) => {
        // Guard against failures of the destination stream itself (e.g. disk full).
        writeStream.on("error", reject);
        mergeCore();
        function mergeCore() {
            // Done once every index [0, chunkCount) has been appended.
            if (mergedChunkIndex >= chunkCount) {
                // BUGFIX: the write stream used to be left open (fd leak /
                // potentially unflushed data); flush and close it before resolving.
                writeStream.end(() => resolve(true));
                return;
            }
            const curChunk = path.resolve(chunkPath, `${hash}-${mergedChunkIndex}`);
            const curChunkReadStream = fs.createReadStream(curChunk);
            // end:false keeps writeStream open so the next chunk can be appended.
            curChunkReadStream.pipe(writeStream, {end: false});

            // When this chunk is fully piped, delete it and recurse to the next.
            curChunkReadStream.on("end", () => {
                fs.unlinkSync(curChunk); // remove the consumed chunk file
                mergedChunkIndex += 1;
                mergeCore();
            });

            // On a read error, close the write stream to avoid a leak and reject.
            curChunkReadStream.on("error", (err) => {
                writeStream.close();
                reject(err);
            });
        }
    });
};

/**
 * Merge all chunk files in a directory into one file.
 * Each chunk is deleted after it has been appended.
 * @param {String} chunkDir directory containing the chunk files
 * @param {String} dataDir directory for the merged file
 * @param {String} fileName name of the merged file
 * @param {Function} [sortFn] optional sorter; receives the chunk-name list and
 *        returns it in merge order
 * @return {Promise<Boolean>} resolves true on success; rejects on missing
 *         directory, empty directory, or any stream error
 */
const mergeChunks = (chunkDir, dataDir, fileName, sortFn) => {
    // The chunk directory must exist and must contain at least one chunk.
    if (!fs.existsSync(chunkDir)) {
        return Promise.reject(new Error('The chunk directory does not exist'));
    }
    let chunks = fs.readdirSync(chunkDir);
    if (!chunks.length) {
        return Promise.reject(new Error('Chunk cannot be an empty directory'));
    }
    // Merge order: caller-supplied sorter, else the default numeric-ish sort.
    chunks = typeof sortFn === 'function' ? sortFn(chunks) : chunks.sort((a, b) => a - b);
    const dataPath = path.resolve(dataDir, fileName);
    const writeChunkStream = fs.createWriteStream(dataPath);
    return new Promise((resolve, reject) => {
        // Guard against failures of the destination stream itself (e.g. disk full).
        writeChunkStream.on("error", reject);
        mergeCore(chunks);
        function mergeCore(remaining) {
            if (remaining.length === 0) {
                // BUGFIX: flush and close the write stream before resolving;
                // it used to be left open (fd leak / potentially unflushed data).
                writeChunkStream.end(() => resolve(true));
                return;
            }
            const curChunk = path.resolve(chunkDir, remaining.shift());
            const curChunkReadStream = fs.createReadStream(curChunk);
            // end:false keeps writeChunkStream open for the following chunks.
            curChunkReadStream.pipe(writeChunkStream, { end: false });

            // When this chunk is fully piped, delete it and recurse to the next.
            curChunkReadStream.on("end", () => {
                fs.unlinkSync(curChunk); // remove the consumed chunk file
                mergeCore(remaining);
            });

            // On a read error, close the write stream and reject.
            curChunkReadStream.on("error", (error) => {
                writeChunkStream.close();
                // BUGFIX: this used to call resolve(error); a truthy Error object
                // then made callers treat a failed merge as a success.
                reject(error);
            });
        }
    });
};

/**
 * Check whether a hash corresponds to an existing chunk directory
 * under UPLOAD_PATH.
 * @param hash candidate hash value
 * @return {boolean|*} true when the chunk directory exists, false otherwise
 *         (including on any filesystem error, which is logged)
 */
const checkHash = hash => {
    // Falsy hashes can never map to a directory.
    if (!hash) {
        return false;
    }
    try {
        return fse.existsSync(path.resolve(UPLOAD_PATH, String(hash)));
    } catch (e) {
        console.log(e.message);
        return false;
    }
};

/**
 * File upload controller: receives file chunks, merges them into the final
 * file, and answers chunk-status queries for resumable uploads.
 * @CreateBy: Haor
 * @CreateTime: 20220721
 */
class Controller {
    /**
     * Receive one uploaded chunk (multipart/form-data) and store it under
     * the chunk directory for its file hash.
     * Expected fields: hash, fileHash, name, index, total; the chunk itself
     * arrives as the `chunk` file field.
     * @param {Object} req request
     * @param {Object} res response
     */
    async uploadChunks(req, res) {
        try {
            const multipart = new multiparty.Form();
            multipart.parse(req, async (err, fields, files) => {
                try {
                    if (err) {
                        console.error("err----", err);
                        // BUGFIX: http.ServerResponse exposes `statusCode`;
                        // assigning `res.status` did not change the HTTP status.
                        res.statusCode = 500;
                        return res.end(
                            Result.fail("process file chunk failed")
                        );
                    }
                    const [hash] = fields.hash;         // hash of this chunk
                    const [fileHash] = fields.fileHash; // hash identifying the whole file
                    const [fileName] = fields.name;
                    const [index] = fields.index;       // chunk index (numeric string)
                    const [total] = fields.total;       // total chunk count (numeric string)
                    const filePath = path.resolve(UPLOAD_PATH, `${fileHash}${Utils.getFileSuffix(fileName)}`); // final file path
                    const chunkDir = path.resolve(UPLOAD_PATH, fileHash); // chunk directory

                    // The assembled file already exists: reject this chunk.
                    if (fse.existsSync(filePath)) {
                        return res.end(Result.fail(`切片${hash}已存在！`));
                    }

                    // Create the chunk directory on the first chunk.
                    if (!fse.existsSync(chunkDir)) {
                        await fse.mkdirs(chunkDir);
                    }

                    // Move the uploaded temp file into the chunk directory.
                    await fse.move(
                        files.chunk[0].path,
                        path.resolve(chunkDir, hash)
                    );

                    // List the chunks received so far (same path as chunkDir,
                    // which was previously recomputed here redundantly).
                    const chunks = await fse.readdir(chunkDir);
                    res.end(
                        Result.ok({
                            uploaded: true,
                            // index/total are numeric strings; `/` coerces them.
                            progress: Math.floor((index / total) * 100),
                            // NOTE(review): strict string comparison — assumes the
                            // client sends a 1-based index formatted exactly like
                            // `total`; confirm against the uploading client.
                            isFinish: index === total,
                            uploadList: chunks
                        })
                    );
                } catch (e) {
                    console.log(e);
                    res.end(Result.fail(e.message));
                }
            });
        } catch (e) {
            res.end(Result.fail(e.message));
            console.log(e.message);
        }
    }

    /**
     * Merge all chunks of a file into the final file, then delete the
     * (now empty) chunk directory.
     * @param {Object} req request (JSON body: { name, hash, size })
     * @param {Object} res response
     * @return {Promise<void>}
     */
    async mergeChunks(req, res) {
        try {
            const {name, hash} = await resolvePost(req);
            const chunkDir = path.resolve(UPLOAD_PATH, hash);
            // Chunk files are named `${hash}-${index}`; merge in numeric index order.
            const mergedRes = await mergeChunks(chunkDir, UPLOAD_PATH, name, function sort(chunks) {
                return chunks.sort((a, b) => a.split('-').pop() - b.split('-').pop());
            }).catch(err => {
                console.log("文件合并失败：", err);
                res.end(Result.fail("Merger failure: " + err.message));
            });
            // mergedRes is undefined when the catch above already responded.
            if (mergedRes) {
                fse.rmdirSync(chunkDir); // chunks were unlinked during the merge
                res.end(Result.ok());
            }
        } catch (e) {
            console.log(e.message);
            // BUGFIX: the request used to be left hanging on this path.
            res.end(Result.fail(e.message));
        }
    }

    /**
     * List the chunks already uploaded for a file hash.
     * @param {Object} req request (JSON body: { hash })
     * @param {Object} res response
     * @return {Promise<void>}
     */
    async getChunks(req, res) {
        try {
            const {hash} = await resolvePost(req);
            // Validate the hash / existence of its chunk directory.
            if (!checkHash(hash)) {
                return res.end(Result.fail('Hash pass error!'));
            }
            const chunks = await fse.readdir(path.resolve(UPLOAD_PATH, String(hash)));
            res.end(Result.ok({hash: hash, chunks: chunks}));
        } catch (e) {
            console.log(e);
            res.end(Result.fail(e.message));
        }
    }

    /**
     * Report upload status for a file hash: whether all chunks are present,
     * and which chunk names exist when the upload is incomplete.
     * @param req request (JSON body: { hash, total })
     * @param res response
     * @return {Promise<*>}
     */
    async verifyChunk(req, res) {
        try {
            const {hash, total} = await resolvePost(req);
            // Unknown hash: nothing uploaded yet.
            if (!checkHash(hash)) {
                return res.end(Result.ok({
                    uploaded: false,
                    chunks: []
                }));
            }
            const chunks = await fse.readdir(path.resolve(UPLOAD_PATH, String(hash)));
            // NOTE(review): strict equality assumes the client sends `total`
            // as a number in the JSON body — confirm.
            const uploaded = total === chunks.length;
            return res.end(Result.ok({
                uploaded: uploaded,
                chunks: uploaded ? [] : chunks
            }));
        } catch (e) {
            console.log(e);
            res.end(Result.fail(e.message));
        }
    }
}

module.exports = Controller;
