import SparkMD5 from 'spark-md5'
/**
 * Split a File/Blob into fixed-size chunks for upload.
 *
 * @param {Blob} file - the source Blob/File to split
 * @param {number} chunkSize - size of each chunk in bytes; must be a positive finite number
 * @returns {Blob[]} chunks in order; the last chunk may be smaller than chunkSize
 * @throws {RangeError} if chunkSize is not a positive finite number
 *   (previously a non-positive chunkSize caused an infinite loop)
 */
export const createChunks = (file, chunkSize) => {
    if (!Number.isFinite(chunkSize) || chunkSize <= 0) {
        throw new RangeError(`chunkSize must be a positive finite number, got ${chunkSize}`)
    }
    const result = []
    for (let i = 0; i < file.size; i += chunkSize) {
        // Blob.slice clamps the end index, so the final partial chunk is handled for free.
        result.push(file.slice(i, i + chunkSize))
    }
    return result
}



// 增量哈希
/**
 * Compute the MD5 hash of a file incrementally, one chunk at a time,
 * so the whole file is never held in memory at once.
 *
 * Fixes:
 * - Uses `SparkMD5.ArrayBuffer`, the incremental API for binary data.
 *   The original used `new SparkMD5()`, whose `append` expects strings,
 *   producing an incorrect digest for ArrayBuffer input.
 * - Wires `reader.onerror` to `reject`; previously a failed read left
 *   the Promise pending forever.
 *
 * @param {Blob[]} chunks - file chunks (e.g. from createChunks), hashed in order
 * @returns {Promise<string>} resolves with the hex MD5 digest of the concatenated chunks
 */
export const hash = (chunks) => {
    return new Promise((resolve, reject) => {
        // Incremental hasher for binary (ArrayBuffer) input.
        const spark = new SparkMD5.ArrayBuffer()

        // Read chunk i, fold it into the hash, then recurse on i + 1.
        // Chunks are read strictly sequentially to keep memory usage flat.
        function _read(i) {
            if (i >= chunks.length) {
                resolve(spark.end())
                return
            }
            const reader = new FileReader()
            reader.onload = (e) => {
                spark.append(e.target.result) // ArrayBuffer bytes of this chunk
                _read(i + 1)
            }
            reader.onerror = () => {
                reject(reader.error ?? new Error(`Failed to read chunk ${i}`))
            }
            reader.readAsArrayBuffer(chunks[i])
        }

        _read(0)
    })
}
