import SparkMD5 from "spark-md5";

const CHUNK_SIZE = 5 * 1024 * 1024; // 5 MB — size of each file chunk
/**
 * Split a file into fixed-size chunks and hash each one concurrently.
 *
 * @param file - The file to split (sliced in CHUNK_SIZE pieces; the last
 *               chunk may be smaller).
 * @returns A promise resolving to all chunk descriptors, in index order
 *          (Promise.all preserves input ordering).
 */
export default function cutFile(file: File): Promise<IChunk[]> {
  const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
  // Kick off every chunk read in parallel; FileReader work is async,
  // so the reads overlap rather than run sequentially.
  const promises: Promise<IChunk>[] = [];
  for (let i = 0; i < chunkCount; i++) {
    promises.push(createChunk(file, i, CHUNK_SIZE) as Promise<IChunk>);
  }
  return Promise.all(promises);
}

function createChunk(file: File, index: number, chunkSize: number) {
  return new Promise((resolve) => {
    const start = index * chunkSize;
    const end = Math.min((index + 1) * chunkSize, file.size);
    const chunk = file.slice(start, end);
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader: FileReader = new FileReader();
    fileReader.readAsArrayBuffer(chunk);
    fileReader.onload = (e) => {
      spark.append(e.target?.result as ArrayBuffer);
      resolve({
        file: chunk,
        index,
        size: chunk.size,
        uid: `${file.name}-${index}-${chunkSize}-${new Date().getTime()}`,
        md5: spark.end(),
      });
    };
  });
}

// Descriptor for one slice of an uploaded file.
interface IChunk {
  file: Blob; // the chunk's raw content (result of File.slice)
  index: number; // zero-based position of the chunk within the file
  size: number; // actual byte length of this chunk
  uid: string; // unique identifier: `${name}-${index}-${chunkSize}-${timestamp}`
  md5: string; // MD5 digest of the chunk's bytes (via spark-md5)
}
