import SparkMd5 from "spark-md5";
/**
 * Read one chunk of `file` and compute an MD5 hash of that chunk.
 *
 * @param {File|Blob} file - Source file to slice.
 * @param {number} index - Zero-based index of the chunk to read.
 * @param {number} chunkSize - Chunk size in bytes.
 * @returns {Promise<{start: number, end: number, index: number, hash: string, blob: Blob}>}
 *   Resolves with the chunk's byte range, its index, the per-chunk MD5 hash,
 *   and the sliced blob. Rejects if the underlying file read fails.
 */
export function creatChunks(file, index, chunkSize) {
  return new Promise((resolve, reject) => {
    const start = index * chunkSize;
    // Clamp so the reported `end` matches the actual blob length on the
    // final (possibly partial) chunk; `Blob.slice` already clamps internally,
    // but the returned metadata previously could overshoot `file.size`.
    const end = Math.min(start + chunkSize, file.size);
    // Use a fresh ArrayBuffer hasher per chunk; otherwise every slice
    // would end up with the same hash.
    const spark = new SparkMd5.ArrayBuffer();
    const blob = file.slice(start, end);
    const fileReader = new FileReader();
    fileReader.onload = (e) => {
      spark.append(e.target.result);
      resolve({
        start,
        end,
        index,
        hash: spark.end(),
        blob,
      });
    };
    // Bug fix: without an error handler, a failed read left the Promise
    // pending forever. Surface the failure to the caller instead.
    fileReader.onerror = () => {
      reject(fileReader.error ?? new Error(`Failed to read chunk ${index}`));
    };
    fileReader.readAsArrayBuffer(blob);
  });
}
