import SparkMD5 from "spark-md5";

// Size of each file slice read per FileReader pass: 1 MiB.
const MAXSIZE = 1024 * 1024;

/** MD5 digest of a single 1 MiB slice of the file. */
export interface SliceMd5 {
  /** 1-based index of the chunk this digest covers. */
  currentChunk: number;
  /** Hex MD5 of that chunk alone. */
  md5: string;
}

/** Result of hashing a file: whole-file MD5 plus per-slice MD5s. */
export interface FileMd5Result {
  /** Hex MD5 of the entire file. */
  md5: string;
  /** Per-chunk digests, in read order. */
  sliceMd5s: SliceMd5[];
}

/**
 * Incrementally computes the MD5 of a Blob/File in 1 MiB chunks,
 * returning both the whole-file digest and the digest of each chunk
 * (useful for resumable / verified uploads).
 *
 * Reading chunk-by-chunk keeps memory bounded for large files.
 *
 * @param file - The Blob (or File) to hash.
 * @returns Resolves with the overall MD5 and the ordered slice MD5s.
 *          Rejects with the FileReader's error if a read fails.
 */
export function fileMd5(file: Blob): Promise<FileMd5Result> {
  return new Promise((resolve, reject) => {
    const sliceMd5s: SliceMd5[] = [];
    const fileReader = new FileReader();
    const chunks = Math.ceil(file.size / MAXSIZE);
    let currentChunk = 0;

    // One incremental hasher accumulates the whole file; a fresh hasher
    // is created per chunk inside onload for the slice digests.
    // NOTE: using SparkMD5.ArrayBuffer (not SparkMD5()) is required for
    // ArrayBuffer input — many online examples get this wrong.
    const allSpark = new SparkMD5.ArrayBuffer();

    fileReader.onload = function (e) {
      const buffer = e.target?.result as ArrayBuffer;
      allSpark.append(buffer);

      const spark = new SparkMD5.ArrayBuffer();
      spark.append(buffer);
      currentChunk++;
      sliceMd5s.push({ currentChunk, md5: spark.end() });

      if (currentChunk < chunks) {
        loadNext();
      } else {
        resolve({ md5: allSpark.end(), sliceMd5s });
      }
    };

    fileReader.onerror = function () {
      // Reject instead of only logging — otherwise the promise would
      // stay pending forever after a read error.
      reject(fileReader.error ?? new Error("FileReader error."));
    };

    /** Reads the next 1 MiB slice (clamped to file.size) as an ArrayBuffer. */
    function loadNext() {
      const start = currentChunk * MAXSIZE;
      const end = Math.min(start + MAXSIZE, file.size);
      fileReader.readAsArrayBuffer(file.slice(start, end));
    }

    loadNext();
  });
}
