const fse = require('fs-extra') // fs文件扩展模块api
const path = require('path')
const { pipeline } = require('stream')

// NODEJS大文件分片上传、断点续传(HTTP/WEBSOCKET)
// https://blog.csdn.net/weixin_36136424/article/details/107722188

// Collect the raw POST body of an incoming request and parse it as JSON.
// @param {import('http').IncomingMessage} req - incoming HTTP request
// @returns {Promise<Object>} resolves with the parsed JSON body;
//   rejects on a malformed body or a request stream error.
exports.resolvePost = (req) =>
  new Promise((resolve, reject) => {
    let chunk = ''
    req.on('data', (data) => {
      chunk += data
    })
    req.on('end', () => {
      try {
        resolve(JSON.parse(chunk))
      } catch (err) {
        // BUG fix: a malformed body previously threw inside the 'end'
        // handler, escaping the promise as an uncaught exception.
        reject(err)
      }
    })
    // BUG fix: without this the promise never settled on a stream error.
    req.on('error', reject)
  })

// 清理目录以及目录下文件 — delete every file inside chunkDir, then the
// (now empty) directory itself. Logs instead of throwing on readdir failure.
// @param {string} chunkDir - directory holding temporary chunk files
exports.clearDirAndFiles = (chunkDir) =>
  fse.readdir(chunkDir, function (err, data) {
    if (err) {
      console.log('文件读取失败')
    } else {
      data.forEach(function (item) {
        // BUG fix: original called fs.unlinkSync, but only `fse` is
        // imported in this module — it threw "fs is not defined".
        fse.unlinkSync(chunkDir + '/' + item)
        console.log(`删除临时分片文件-> ${chunkDir + '/' + item}`)
      })
      // 合并后删除保存切片的目录 — remove the emptied chunk directory.
      fse.rmdirSync(chunkDir)
      console.log(`清理临时目录以及该目录下文件: ${chunkDir} 成功`)
    }
  })

// Pipe one chunk file into an already-positioned write stream, deleting the
// chunk file once it has been fully read.
// @param {string} filePath - path of the chunk file to consume
// @param {fs.WriteStream} writeStream - destination stream (offset set by caller)
// @returns {Promise<void>} resolves after the chunk is piped and deleted;
//   rejects if the chunk cannot be read.
const pipeStream = (filePath, writeStream) => {
  console.log('写入文件流 -> ', filePath)
  return new Promise((resolve, reject) => {
    const readStream = fse.createReadStream(filePath)
    readStream.on('end', () => {
      fse.unlinkSync(filePath) // remove the consumed chunk file
      resolve()
    })
    // BUG fix: without an error handler the promise never settled on a
    // read failure, leaving Promise.all in mergeFileChunk stuck forever.
    readStream.on('error', reject)
    readStream.pipe(writeStream)
  })
}

// 合并切片  nodejs v16.14.2
exports.mergeFileChunk = async (chunkDir, targetFile, chunkSize) => {
  const chunkPaths = await fse.readdir(chunkDir) // 读取一个目录
  // 否则直接读取目录获得的顺序会错乱
  chunkPaths.sort((a, b) => a.split('-')[1] - b.split('-')[1])
  // 并发写入文件
  await Promise.all(
    chunkPaths.map((chunkPath) => {
      const index = parseInt(chunkPath.split('-').pop())
      const start = index * chunkSize
      const end = (index + 1) * chunkSize

      console.log(`file: ${chunkPath} | start : ${start} -> end : ${end}`)
      // console.log('写入流', chunkPath, index)
      // console.log('chunkPath -> ', path.resolve(chunkDir, chunkPath))
      return pipeStream(
        path.resolve(chunkDir, chunkPath), // 读取文件流
        // 根据size在指定位置创建可写流
        fse.createWriteStream(targetFile, {
          start,
          end
        })
      )
    })
  )
  // 合并后删除保存切片的目录
  fse.rmdirSync(chunkDir)
}

// 合并分片
exports.mergeChunks = async (chunkDir, targetFile, callback) => {
  // 获取源文件目录下的所有文件
  const chunkPaths = await fse.readdir(chunkDir)
  // 否则直接读取目录获得的顺序会错乱
  chunkPaths.sort((a, b) => a.split('-')[1] - b.split('-')[1])
  // 采用Stream方式合并
  const targetStream = fse.createWriteStream(targetFile)
  const readStream = function (chunkArray, cb) {
    const filename = chunkArray.shift()
    const filepath = path.join(chunkDir, filename)
    const originStream = fse.createReadStream(filepath)
    originStream.pipe(targetStream, { end: false })
    originStream.on('end', function () {
      // 删除文件
      fse.unlinkSync(filepath)
      if (chunkArray.length > 0) {
        readStream(chunkArray, callback)
      } else {
        cb()
      }
    })
  }
  readStream(chunkPaths, callback)
}

/**
 * Stream 合并
 * @param { String } sourceFileDirectory 源文件目录
 * @param { String } targetFile 目标文件
 */
exports.streamMerge = (sourceFileDirectory, targetFile) => {
  const scripts = fse.readdirSync(path.resolve(__dirname, sourceFileDirectory)) // 获取源文件目录下的所有文件
  const fileWriteStream = fse.createWriteStream(
    path.resolve(__dirname, targetFile)
  ) // 创建一个可写流

  // fs.readdir 读取出来的结果，根据具体的规则做下排序，防止因为顺序不对导致最终合并之后的文件无效。

  return streamMergeRecursive(scripts, fileWriteStream, sourceFileDirectory)
}

/**
 * Stream 合并的递归调用 — recursive step of the stream merge: consume the
 * head of `scripts`, pipe it into the shared write stream, then recurse once
 * that file has ended.
 * @param { Array } scripts - remaining file names (consumed via shift)
 * @param { Stream } fileWriteStream - shared destination write stream
 * @param { String } sourceFileDirectory - directory containing the files
 */
function streamMergeRecursive(
  scripts = [],
  fileWriteStream,
  sourceFileDirectory
) {
  // Base case: every input consumed — close the write stream so the fd is
  // released. NOTE(review): the closing payload is appended to the merged
  // output; presumably intentional for concatenating JS scripts — confirm
  // before using this helper for arbitrary binary files.
  if (scripts.length === 0) {
    return fileWriteStream.end("console.log('Stream 合并完成')")
  }

  const nextName = scripts.shift()
  const currentFile = path.resolve(__dirname, sourceFileDirectory, nextName)
  const reader = fse.createReadStream(currentFile)

  // end: false keeps the destination open between files; recurse on 'end'.
  reader.pipe(fileWriteStream, { end: false })
  reader.on('end', function () {
    streamMergeRecursive(scripts, fileWriteStream, sourceFileDirectory)
  })

  reader.on('error', function (error) {
    // On read failure: log and close the destination to avoid a leak.
    console.error(error)
    fileWriteStream.close()
  })
}
