var express = require('express');
var router = express.Router();
const multiparty = require('multiparty');
const path = require('path');
const fse = require('fs-extra');
const url = require('url');

const UPLOAD_DIR = path.resolve(__dirname, '..', 'target'); // 大文件存储目录

/* GET home page. */
/* GET home page — renders the index view. */
router.get('/', (req, res, next) => {
  const locals = { title: 'Express' };
  res.render('index', locals);
});

// 切片上传
router.post("/fileChunk", (req, res, next) =>{
  const multipart = new multiparty.Form(); // 使用 content-type 解析 http 请求multipart/form-data，也称为文件上传。
  multipart.parse(req,  async function(err, fields, files) {
    if (err) {
      console.error(err);
      res.status = 500;
      res.end('process file chunk failed');
      return;
    }

    const [chunk] = files.file;
    const [hash] = fields.md5;
    const [filename] = fields.fileName;
    console.log('handleFileChunk -> filename', filename);

    const filePath = path.resolve(UPLOAD_DIR, `${hash}`); // 文件路径
    const chunkDir = path.resolve(UPLOAD_DIR, hash); // 切片文件
    console.log("chunkDir", chunkDir)

    // 切片目录不存在，创建切片目录
    if (!fse.existsSync(chunkDir)) {
      await fse.mkdirs(chunkDir);
    }

    try {
      await fse.move(chunk.path, path.resolve(chunkDir, filename));
    } catch (error) {
      console.log('handleFileChunk -> error', error);
    }
    res.send({
      code: 200,
      message: '切片上传成功'
    })
    return;
  });

})

// 合并切片
router.post("/fileChunkMerge", async (req, res, next) =>{
   var params = req.body
   const { md5, fileName, fileChunkNum } = params;
   const ext = extractExt(fileName);
   const filePath = path.resolve(UPLOAD_DIR, `${md5}${ext}`);
   console.log("filePath:", filePath)
   await mergeFileChunk(filePath, md5, fileChunkNum);
    res.send({
      code: 2000,
      message: '合并成功'
    });
    return;
})

// 提取切片名
/**
 * Extract the file extension (including the leading dot) from a filename.
 * @param {string} filename e.g. "archive.tar.gz"
 * @returns {string} the last extension, e.g. ".gz"; '' when there is no dot
 */
const extractExt = (filename) => {
  const dot = filename.lastIndexOf('.');
  // BUG FIX: lastIndexOf returns -1 when there is no dot; slice(-1) would
  // then return the last character of the name instead of an empty string.
  return dot === -1 ? '' : filename.slice(dot);
};

// 合并切片流 递归
/**
 * Recursively pipe each chunk file in `arr` into the shared write stream,
 * deleting each chunk once it has been fully copied (callback-style variant).
 * @param {string[]} arr   chunk file paths, consumed in order (mutated)
 * @param {fs.WriteStream} merge shared destination stream
 */
const mgStream = (arr, merge) => {
  if (!arr.length) {
    // All chunks consumed: close the writable side.
    // NOTE(review): end("合并完成") writes that string into the merged file
    // before closing — looks unintended, but preserved as-is; confirm.
    merge.end("合并完成");
    return;
  }
  const curfile = arr.shift();
  // BUG FIX: `stream` was assigned without declaration, creating an implicit
  // global shared by any concurrent merges; declare it locally.
  const stream = fse.createReadStream(curfile);
  // pipe defaults to end:true, which would close the shared writer when this
  // reader finishes — keep it open for the next chunk.
  stream.pipe(merge, { end: false });
  // When the current chunk is fully read, remove it and recurse to the next.
  stream.on("end", function () {
    console.log("完成 " + curfile + " 文件合并");
    fse.unlinkSync(curfile);
    mgStream(arr, merge);
  });
};

// 合并切片流- await
/**
 * Pipe a single chunk file into `writeStream` without closing it, then
 * delete the chunk. Resolves when the chunk has been fully read.
 * @param {string} path absolute path of the chunk file
 * @param {fs.WriteStream} writeStream shared destination stream
 * @returns {Promise<void>}
 */
const pipeStream = (path, writeStream) => {
  return new Promise((resolve, reject) => {
    const readStream = fse.createReadStream(path);
    readStream.on('end', () => {
      // Chunk fully copied — the slice file is no longer needed.
      fse.unlinkSync(path);
      console.log("完成 " + path + " 文件合并");
      resolve();
    });
    // BUG FIX: without an error handler a failed read left this promise
    // pending forever, hanging the whole merge request.
    readStream.on('error', reject);
    // end:false keeps the shared write stream open for subsequent chunks.
    readStream.pipe(writeStream, { end: false });
  });
};

// 合并流操作
/**
 * Merge every chunk under UPLOAD_DIR/<fileHash> into `filePath`, in chunk
 * order, then remove the chunk directory.
 * @param {string} filePath destination path of the assembled file
 * @param {string} fileHash md5 of the whole file (names the chunk directory)
 * @param {number} size expected chunk count — currently unused; kept for
 *                      interface compatibility with the caller
 * @returns {Promise<void>}
 */
const mergeFileChunk = async (filePath, fileHash, size) => {
  const chunkDir = path.resolve(UPLOAD_DIR, fileHash);
  const chunkPaths = await fse.readdir(chunkDir);
  // readdir order is not guaranteed — sort numerically by chunk index so the
  // file is reassembled in the right order ('10' must come after '2').
  chunkPaths.sort((a, b) => Number(a) - Number(b));
  // Single shared write stream; each chunk is appended sequentially.
  const merge = fse.createWriteStream(filePath);

  // Plain for...of: chunkPaths is an ordinary array (the original's
  // `for await` worked but implied an async iterable that isn't one).
  // Chunks must be written one at a time into the shared stream.
  for (const chunkPath of chunkPaths) {
    await pipeStream(path.resolve(chunkDir, chunkPath), merge);
  }
  // BUG FIX: fs.rmdirSync(dir, { recursive }) is deprecated and `force` is
  // not an rmdirSync option; fse.remove deletes the tree and is a no-op if
  // the directory is already gone.
  await fse.remove(chunkDir);
};
// Export the router so the application can mount these upload/merge routes.
module.exports = router;
