/**
 * 等额切割大文件，分多次上传
 * @param {File} file 
 * @param {Number} length 
 */
const createFileChunk = (file, length) => {
  const fileChunkList = [],
    Size = 2 * 1024 * 1024
  const chunkSize = length ? Math.ceil(file.size / length) : Size;
  const template = {
    filename: file.name,
    size: file.size
  }
  if (file.size > chunkSize) {
    let current = 0,
      index = 0
    while (current <= file.size) {
      const offsetEnd = current + chunkSize;
      fileChunkList.push({
        ...template,
        file: file.slice(current, offsetEnd > file.size ? file.size : offsetEnd),
        index: index++,
      })
      current += chunkSize;
    }
    return fileChunkList;
  } else {
    return [{
      ...template,
      index: 0,
      file: file,
    }]
  }
}

/**
 * Compute the file hash with spark-md5 inside a Worker.
 * Progress updates are written to the module-level `hashPercentage`;
 * the promise resolves once the worker reports a final hash.
 * `size` is optional and only improves sampled-hash accuracy.
 * NOTE(review): `worker` is a module-level Worker defined elsewhere in this
 * file; each call replaces its onmessage handler — confirm single-flight use.
 * @param {Array} fileChunkSlice - chunk descriptors to hash
 * @param {Number} size - optional sampling size
 * @returns {Promise<String>} resolves with the computed hash
 */
const hashSendFnCreate = (fileChunkSlice = [], size) => {
  return new Promise((resolve) => {
    // Install the listener first; message delivery is asynchronous either
    // way, so this is equivalent to assigning it after postMessage.
    worker.onmessage = ({ data }) => {
      hashPercentage = parseFloat(data.percentage);
      if (data.hash) {
        resolve(data.hash);
      }
    };
    worker.postMessage({
      limit: 20,
      size,
      listFileChunk: fileChunkSlice,
    });
  });
};

/**
 * Assemble the multipart payload for each chunk ahead of the axios upload.
 * @param {Array<{file: Blob, index: Number, filename: String, size: Number}>}
 *   sureSendSlice - chunk descriptors selected for upload
 * @param {String} hash_accept - file-level hash shared by every chunk; each
 *   chunk's own `hash` field is `${index}_${hash_accept}`
 * @returns {Array<FormData>} one FormData per chunk, in input order
 */
const dataWrapper = (sureSendSlice = [], hash_accept) => {
  const payloads = [];
  for (const { file, index, filename, size } of sureSendSlice) {
    const formData = new FormData();
    formData.append("file", file);
    formData.append("hash", `${index}_${hash_accept}`);
    formData.append("file_name", filename);
    formData.append("size", size);
    formData.append("hash_accept", hash_accept);
    payloads.push(formData);
  }
  return payloads;
};