const router = require('koa-router')()
const { koaBody } = require('koa-body')
const path = require('path')
const fs = require('fs')

// Directory that holds uploaded chunks and merged files.
// formidable's `uploadDir` must already exist when the first request
// arrives, so create it eagerly at module load.
const outputPath = path.join(__dirname, './upload')
if (!fs.existsSync(outputPath)) {
  fs.mkdirSync(outputPath, { recursive: true })
}
/**
 * POST /upload — receives a single chunk of a large file.
 *
 * The multipart field name encodes `<filename>-<fileHash>-<index>`.
 * Because the original filename may itself contain '-', we split from the
 * END: the last two segments are the hash and the chunk index, and
 * everything before them is the filename. (The original naive
 * `const [filename, fileHash, index] = name.split('-')` corrupted uploads
 * whose filename contained a dash.)
 *
 * Each chunk is stored as `<outputPath>/<filename>/<fileHash>-<index>`.
 */
router.post('/upload', koaBody({
  multipart: true,
  formidable: {
    uploadDir: outputPath,
    onFileBegin: (name, file) => {
      const parts = name.split('-')
      const index = parts.pop()
      const fileHash = parts.pop()
      const filename = parts.join('-')
      // Per-file chunk directory; create it on the first chunk.
      const dir = path.join(outputPath, filename)
      if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true })
      }
      // path.join instead of string concat keeps this cross-platform.
      file.filepath = path.join(dir, `${fileHash}-${index}`)
    }
  }
}), async (ctx) => {
  ctx.set('Content-Type', 'application/json')
  ctx.body = JSON.stringify({
    data: { code: 2000 },
    message: 'update successful！'
  })
})

/**
 * POST /mergeChunks — stitches the uploaded chunks of `filename` back into
 * one file named `_<filename>` under `outputPath`, then removes the chunk
 * directory.
 *
 * Body: { filename: string, size: number } — `size` is the client's chunk
 * size; chunk i is written at byte offset i * size.
 *
 * BUG FIXES vs. the original:
 *  - `forEach` over streams returned immediately, so the route answered
 *    before the merge finished; each pipe is now promisified and awaited.
 *  - Every per-chunk write stream opened with the default 'w' flag, so
 *    concurrent opens truncated each other; the destination is now
 *    truncated ONCE and each stream opens with 'r+'.
 *  - Chunks were unlinked on read-`end` (before the write was flushed) and
 *    the directory was removed inside a 500 ms `setTimeout` race; cleanup
 *    now happens on write-`finish` and after all writes complete.
 */
router.post('/mergeChunks', async (ctx) => {
  const { filename, size } = ctx.request.body

  const mergeFileChunk = async (filename, size) => {
    const chunkDir = path.join(outputPath, filename)
    const chunkNames = fs.readdirSync(chunkDir)
    if (chunkNames.length === 0) {
      return false
    }
    // Chunk files are named `<hash>-<index>`; sort numerically by index
    // (lexicographic order would put "10" before "2").
    chunkNames.sort((a, b) => Number(a.split('-')[1]) - Number(b.split('-')[1]))

    const mergedPath = path.join(outputPath, '_' + filename)
    // Create/truncate the destination exactly once; per-chunk streams then
    // open with 'r+' so they never truncate each other.
    fs.writeFileSync(mergedPath, '')

    await Promise.all(chunkNames.map((chunkName, index) => {
      return new Promise((resolve, reject) => {
        const chunkPath = path.join(chunkDir, chunkName)
        const readStream = fs.createReadStream(chunkPath)
        const writeStream = fs.createWriteStream(mergedPath, {
          flags: 'r+',
          start: index * size
        })
        writeStream.on('finish', () => {
          // The chunk is fully flushed into the merged file; remove it.
          fs.unlinkSync(chunkPath)
          resolve()
        })
        readStream.on('error', reject)
        writeStream.on('error', reject)
        readStream.pipe(writeStream)
      })
    }))

    // All chunks were written and unlinked — the directory is empty now.
    fs.rmdirSync(chunkDir)
    return true
  }

  await mergeFileChunk(filename, size)
  ctx.set('Content-Type', 'application/json')
  ctx.body = JSON.stringify({
    data: { code: 2000 },
    message: 'successful！'
  })
})

// Expose the chunk-upload router for the Koa app to mount.
module.exports = router
