const path = require('path');
const fse = require('fs-extra');

// Storage directory for large uploads: sibling "target" folder next to this
// module's parent directory. Chunk folders (`<fileHash>-chunks`) and merged
// files both live here.
const UPLOAD_DIR = path.resolve(__dirname, '..', 'target');

// Extract the file extension, including the leading dot:
//   "photo.png" -> ".png", "archive.tar.gz" -> ".gz", ".gitignore" -> ".gitignore".
// Returns "" when the name contains no dot. (The previous version returned the
// last character in that case, because lastIndexOf yields -1 and slice(-1)
// takes the final character.)
const extractExt = filename => {
  const dotIndex = filename.lastIndexOf('.');
  return dotIndex === -1 ? '' : filename.slice(dotIndex);
};


// Return the names of the chunks already uploaded for the given file hash.
// Chunks for a file are stored under `<UPLOAD_DIR>/<fileHash>-chunks`; if that
// directory does not exist yet, nothing has been uploaded, so resolve to [].
const createUploadedList = async (fileHash) => {
  const chunkDir = path.resolve(UPLOAD_DIR, `${fileHash}-chunks`);
  if (!fse.existsSync(chunkDir)) {
    return [];
  }
  return fse.readdir(chunkDir);
};

// Stream the file at `chunkPath` into `writeStream`, then delete the source
// file once all data has been flushed to the destination.
// Resolves on the write stream's "finish" event — unlike the read stream's
// "end", "finish" guarantees the data actually reached the destination.
// Rejects (instead of hanging forever) if either stream emits an error.
// The parameter was renamed from `path` to avoid shadowing the `path` module.
// @param {String} chunkPath path of the chunk file to consume (deleted on success)
// @param {stream.Writable} writeStream destination stream (ended by pipe)
// @returns {Promise<void>}
const pipeStream = (chunkPath, writeStream) => {
  return new Promise((resolve, reject) => {
    const readStream = fse.createReadStream(chunkPath);
    readStream.on("error", (err) => {
      // Source unreadable: tear down the destination so no fd leaks.
      writeStream.destroy();
      reject(err);
    });
    writeStream.on("error", reject);
    writeStream.on("finish", () => {
      fse.unlinkSync(chunkPath);
      resolve();
    });
    readStream.pipe(writeStream);
  });
}


/**
 * Merge every chunk in chunkDir into filePath, streaming each chunk to its
 * byte offset (chunk index * size) so all chunks can be written in parallel.
 * The chunk directory is removed once all chunks have been written (each
 * chunk file is deleted by pipeStream after it is flushed).
 * @param {String} filePath destination path of the merged file
 * @param {String} chunkDir directory holding the uploaded chunks
 * @param {Number} size byte size of one chunk (all but possibly the last)
 * @returns {Promise<void>} resolves when the merge is complete
 */
const mergeFileChunk = async (filePath, chunkDir, size) => {
  // List the chunk files.
  const chunkPaths = await fse.readdir(chunkDir);
  // Sort numerically by chunk index — the last "-"-separated segment of the
  // name — because a raw directory listing may be lexicographic ("10" < "2").
  chunkPaths.sort((a, b) => a.split("-").pop() - b.split("-").pop());
  // Create/truncate the target exactly once up front. Each chunk stream then
  // opens it with flags "r+": the default "w" flag would truncate the file on
  // every open, racing with chunks that were already written in parallel.
  await fse.writeFile(filePath, "");
  await Promise.all(
    chunkPaths.map((chunkPath, index) =>
      pipeStream(
        path.resolve(chunkDir, chunkPath),
        // Write stream positioned at this chunk's offset. (The former `end`
        // option was dropped: fs.createWriteStream ignores it.)
        fse.createWriteStream(filePath, {
          flags: "r+",
          start: index * size,
        })
      )
    )
  );
  fse.rmdirSync(chunkDir); // every chunk was unlinked by pipeStream, so the dir is empty
}



// Public API consumed by the upload routes: extension/path helpers plus the
// chunk bookkeeping and merge functions.
module.exports = {
  extractExt,
  createUploadedList,
  mergeFileChunk,
  UPLOAD_DIR,
  pipeStream
};