import SparkMD5 from "spark-md5";

/** A single slice of a file, produced by an upstream file-splitting step. */
interface IChunk {
  /** Byte offset (inclusive) of this slice within the original file. */
  start: number;
  /** Byte offset (exclusive) of this slice within the original file. */
  end: number;
  /** Zero-based position of this slice in the chunk sequence. */
  index: number;
  /** Hash of this individual chunk — presumably MD5; computed elsewhere (TODO confirm with producer). */
  chunkHash: string;
  /** The raw slice data to be read and hashed. */
  chunk: Blob;
}

/**
 * Computes the MD5 hash of a whole file by incrementally feeding its
 * chunks into spark-md5, reading them sequentially with FileReader.
 *
 * @param chunks - Ordered list of file slices; their `chunk` Blobs are
 *                 appended to the hash in array order.
 * @returns Promise resolving to the hex MD5 digest of the concatenated
 *          chunks (for an empty list, the MD5 of zero bytes). Rejects if
 *          any chunk fails to read.
 */
export function getFileHash(chunks: IChunk[]): Promise<string> {
  return new Promise((resolve, reject) => {
    const spark = new SparkMD5.ArrayBuffer();

    function readChunk(i: number): void {
      /** All chunks consumed: finalize and resolve the accumulated hash. */
      if (i >= chunks.length) {
        resolve(spark.end());
        return;
      }

      const reader = new FileReader();

      reader.onload = (e) => {
        const buf = e.target?.result;
        /** Guard instead of an `as` cast: result can be null/string in theory. */
        if (!(buf instanceof ArrayBuffer)) {
          reject(new Error(`Unexpected FileReader result for chunk ${i}`));
          return;
        }
        spark.append(buf);
        /** Recurse to read the next chunk only after this one is appended. */
        readChunk(i + 1);
      };

      /**
       * Bug fix: the original had no error handler, so a failed read left
       * the Promise pending forever. Surface the failure to the caller.
       */
      reader.onerror = () => {
        reject(reader.error ?? new Error(`Failed to read chunk ${i}`));
      };

      reader.readAsArrayBuffer(chunks[i].chunk);
    }

    readChunk(0);
  });
}
