const express = require('express');
const multer = require('multer');
const path = require('path');
const fse = require('fs-extra');
const asyncLock = require('async-lock');
const lock = new asyncLock();

const router = express.Router();
// All chunk directories and merged files live under ../largeUpload.
const UPLOAD_DIR = path.resolve(__dirname, '../largeUpload');

// multer writes each incoming "chunk" field to a temp file inside UPLOAD_DIR;
// the upload handler then moves it into the per-hash chunk directory.
const upload = multer({ dest: UPLOAD_DIR });

// POST /checkfile — report whether a file with this content hash is already
// fully uploaded, and if not, which chunk names are already present so the
// client can resume from where it left off.
router.post('/checkfile', async (req, res) => {
  try {
    const { ext, hash } = req.body;

    // hash/ext are client-supplied and are used to build filesystem paths:
    // restrict their charset so they cannot be used for path traversal
    // (e.g. hash = "../../etc/passwd").
    if (!/^[A-Za-z0-9_-]+$/.test(String(hash)) || !/^[A-Za-z0-9]+$/.test(String(ext))) {
      return res.status(400).json({ code: -1, message: '非法的 hash 或 ext' });
    }

    const filePath = path.resolve(UPLOAD_DIR, `${hash}.${ext}`);
    let uploaded = false;
    let uploadedList = [];

    if (fse.existsSync(filePath)) {
      // Merged file already exists — client can skip the upload entirely.
      uploaded = true;
    } else {
      // Otherwise report the chunks already received for resumable upload.
      uploadedList = await getUploadedList(path.resolve(UPLOAD_DIR, hash));
    }

    res.json({
      code: 0,
      data: {
        uploaded,
        uploadedList
      }
    });
  } catch (err) {
    // Previously any fse/readdir failure crashed the request unhandled.
    console.error('检查文件发生错误:', err);
    res.status(500).json({ code: -1, message: '检查文件发生错误' });
  }
});
  
  /**
   * List the chunk file names already stored under `dirPath`.
   * @param {string} dirPath - absolute path of a per-hash chunk directory
   * @returns {Promise<string[]>} chunk names, excluding dot-files
   *   (e.g. ".DS_Store"); [] when the directory does not exist.
   */
  async function getUploadedList(dirPath) {
    if (!fse.existsSync(dirPath)) {
      return [];
    }
    const entries = await fse.readdir(dirPath);
    return entries.filter((entry) => !entry.startsWith('.'));
  }
  


  // POST /uploadfile — receive one chunk (multipart field "chunk") and move
  // it into the chunk directory named after the file's content hash.
  router.post('/uploadfile', upload.single('chunk'), async (req, res) => {
    try {
      const { hash, name, totalBlock } = req.body;

      // hash and name are client-supplied and become filesystem paths:
      // reject separators / ".." to prevent path traversal.
      if (!/^[A-Za-z0-9_-]+$/.test(String(hash)) || /[/\\]|\.\./.test(String(name))) {
        if (req.file) await fse.remove(req.file.path);
        return res.status(400).json({ code: -1, message: '非法的参数' });
      }

      const chunkPath = path.resolve(UPLOAD_DIR, hash);
      // ensureDir is atomic-enough: it avoids the check-then-create race the
      // old pathExistsSync + mkdir pair had under concurrent chunk uploads.
      await fse.ensureDir(chunkPath);

      // Was an implicit global before (no const/let) — shared, mutable state
      // across concurrent requests. Now request-scoped.
      const uploadedList = await getUploadedList(chunkPath);

      // req.body values arrive as strings; compare numerically on purpose
      // instead of relying on == coercion.
      if (uploadedList.length === Number(totalBlock)) {
        // All chunks already present: discard the redundant temp file multer
        // just saved (previously leaked in UPLOAD_DIR).
        if (req.file) await fse.remove(req.file.path);
        return res.json({
          code: -1,
          message: '所有切片已上传'
        });
      }

      await fse.move(req.file.path, path.resolve(chunkPath, name), { overwrite: true });

      res.json({
        code: 0,
        message: '切片上传成功'
      });
    } catch (err) {
      console.error('上传文件发生错误:', err);
      res.status(500).json({
        code: -1,
        message: '上传文件发生错误'
      });
    }
  });
  
  // POST /mergefile — concatenate all uploaded chunks into the final file
  // and return the URL it will be served under.
  router.post('/mergefile', async (req, res) => {
    try {
      const { ext, size, hash } = req.body;

      // Same traversal guard as the other routes: hash/ext are untrusted.
      if (!/^[A-Za-z0-9_-]+$/.test(String(hash)) || !/^[A-Za-z0-9]+$/.test(String(ext))) {
        return res.status(400).json({ code: -1, message: '非法的 hash 或 ext' });
      }

      const filePath = path.resolve(UPLOAD_DIR, `${hash}.${ext}`);
      // `size` is the chunk size the client split with; it is the byte-offset
      // stride used when re-assembling.
      await mergeFile(filePath, size, hash);

      res.json({
        code: 0,
        data: {
          url: `/largeUpload/${hash}.${ext}`
        }
      });
    } catch (err) {
      // Previously a merge failure left the request hanging / crashed it.
      console.error('合并文件发生错误:', err);
      res.status(500).json({ code: -1, message: '合并文件发生错误' });
    }
  });
  
  /**
   * Assemble the final file from its chunk directory.
   * Chunk files are named `<prefix>-<index>`; sort numerically by index — a
   * plain string sort (and the old implicit string subtraction) orders "10"
   * before "2" unreliably, so coerce explicitly.
   * @param {string} filePath - destination path of the merged file
   * @param {number|string} size - chunk size, used as the write-offset stride
   * @param {string} hash - content hash; also the chunk directory name
   */
  async function mergeFile(filePath, size, hash) {
    const chunkDir = path.resolve(UPLOAD_DIR, hash);
    const names = await fse.readdir(chunkDir);
    const chunks = names
      .slice()
      .sort((a, b) => Number(a.split('-')[1]) - Number(b.split('-')[1]))
      .map((name) => path.resolve(chunkDir, name));
    await mergeChunks(chunks, filePath, Number(size));
    // The chunk files themselves are unlinked during the merge; remove the
    // now-empty directory too, so a later /checkfile on the same hash does
    // not report stale (empty) upload progress.
    await fse.remove(chunkDir);
  }
  
  /**
   * Concatenate chunk files into `dest`, writing each chunk at its own byte
   * offset (index * CHUNK_SIZE) so the pieces can stream concurrently.
   * Each chunk file is deleted once it has been fully read.
   * @param {string[]} files - absolute chunk paths, already in index order
   * @param {string} dest - path of the merged output file
   * @param {number} CHUNK_SIZE - chunk size the client split with
   * @returns {Promise<void[]>} resolves when every chunk has been written
   */
  function mergeChunks(files, dest, CHUNK_SIZE) {
    const pipeStream = (filePath, writeStream) =>
      new Promise((resolve, reject) => {
        const readStream = fse.createReadStream(filePath);

        // Propagate stream failures. Previously neither stream had an
        // 'error' handler, so any I/O error left this promise pending and
        // Promise.all hanging forever.
        readStream.on('error', reject);
        writeStream.on('error', reject);

        readStream.on('end', () => {
          fse.unlinkSync(filePath);
          resolve();
        });

        readStream.pipe(writeStream);
      });

    // `end` was passed before, but it is a read-stream option only and was
    // silently ignored by createWriteStream — dropped here. NOTE(review):
    // each write stream opens `dest` with the default 'w' flag plus a
    // `start` offset; if offset writes ever misbehave, pre-create the file
    // and use flags: 'r+' — confirm against the Node fs docs.
    const pipes = files.map((file, index) =>
      pipeStream(file, fse.createWriteStream(dest, {
        start: index * CHUNK_SIZE
      }))
    );

    return Promise.all(pipes);
  }
  


// CommonJS export of the upload router; mounted by the main app.
module.exports = router
