import SparkMD5 from 'spark-md5'

/**
 * Incrementally computes the MD5 digest of a file by reading it in 2 MB
 * chunks, so arbitrarily large files are hashed without loading the whole
 * content into memory at once.
 *
 * @param file     The file (or blob) to hash.
 * @param callback Invoked exactly once with the hex MD5 digest after the
 *                 last chunk has been processed.
 * @param onError  Optional error handler invoked with `fileReader.error`
 *                 if any chunk fails to read. Defaults to logging a
 *                 warning, matching the previous behavior, so existing
 *                 callers are unaffected.
 */
export const fileMd5 = (
  file: File & Blob,
  callback: (md5: string) => void,
  onError: (error: DOMException | null) => void = () => console.warn('something went wrong.'),
) => {
  const chunkSize = 2097152 // 2MB per read
  const chunks = Math.ceil(file.size / chunkSize)
  let currentChunk = 0
  const spark = new SparkMD5.ArrayBuffer()
  const fileReader = new FileReader()

  fileReader.onload = () => {
    // result is an ArrayBuffer because readAsArrayBuffer is used below.
    spark.append(fileReader.result as ArrayBuffer)
    currentChunk++

    if (currentChunk < chunks) {
      loadNext()
    } else {
      const md5 = spark.end() // hex digest of the whole file
      spark.destroy() // release spark-md5's internal buffers
      callback(md5)
    }
  }

  fileReader.onerror = () => {
    // Surface the failure to the caller instead of swallowing it.
    onError(fileReader.error)
  }

  const loadNext = () => {
    const start = currentChunk * chunkSize
    // Clamp the final chunk to the end of the file.
    const end = Math.min(start + chunkSize, file.size)

    fileReader.readAsArrayBuffer(file.slice(start, end))
  }

  loadNext()
}
