import sparkMd5 from 'spark-md5'
/**
 * Compute the MD5 fingerprint ("hash") of a file.
 *
 * Demonstrates three strategies in turn; the last result wins.
 * NOTE(review): running all three hashes the file three times — in
 * production keep exactly one of these calls.
 *
 * Bug fix: the original declared this as a plain `function` while using
 * `await`, which is a SyntaxError — it must be `async`.
 *
 * @param {File|Blob} file - the file to fingerprint (calculateHashWorker /
 *   calculateHashIdle actually expect chunk arrays — TODO confirm caller)
 * @returns {Promise<string>} hex MD5 digest
 */
async function getHash(file) {
    let hash;
    // 1) Straight main-thread hash (janks on large files).
    hash = await calculateHash(file)
    // 2) Web Worker — hashing off the main thread.
    hash = await calculateHashWorker(file)
    // 3) requestIdleCallback — hash incrementally during idle frames.
    hash = await calculateHashIdle(file)
    return hash;
}

/**
 * MD5-hash a file directly on the main thread.
 * Large files will block the UI while they are read and digested.
 *
 * @param {Blob} file - blob to digest
 * @returns {Promise<string>} hex MD5 digest
 */
async function calculateHash(file) {
    // Read the whole blob into a binary string, then digest it in one shot.
    const binary = await blobToData(file);
    return sparkMd5.hash(binary);
}

/**
 * Read a Blob into a binary string (binary -> ASCII codes -> hex digest
 * downstream via sparkMd5.hash).
 *
 * NOTE: FileReader.readAsBinaryString is deprecated; it is kept here
 * because sparkMd5.hash() expects a string. Consider migrating to
 * readAsArrayBuffer + sparkMd5.ArrayBuffer.
 *
 * Bug fix: the original Promise never settled when the read failed,
 * hanging the caller forever — an onerror handler now rejects.
 *
 * @param {Blob} blob - blob to read
 * @returns {Promise<string>} the blob's contents as a binary string
 */
async function blobToData(blob) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader()
        reader.onload = function () {
            resolve(reader.result)
        }
        reader.onerror = function () {
            reject(reader.error)
        }
        reader.readAsBinaryString(blob)
    })
}

// web-worker
async function calculateHashWorker(chunks) {
    return new Promise(resolve => {
        // web-worker 防止卡顿主线程
        this.worker = new Worker("/hash.js");
        this.worker.postMessage({ chunks });
        this.worker.onmessage = e => {
            const { progress, hash } = e.data;
            this.hashProgress = Number(progress.toFixed(2));
            if (hash) {
                resolve(hash);
            }
        };
    });
}

/**
 * Hash file chunks incrementally during browser idle time
 * (requestIdleCallback), avoiding main-thread jank.
 *
 * Bug fixes vs. original:
 *  - After resolving, the original unconditionally rescheduled
 *    requestIdleCallback, leaving an idle callback firing forever.
 *  - Plain call left `this` undefined, so `this.hashProgress = ...`
 *    threw a TypeError; progress is now mirrored onto `this` only when
 *    a receiver is bound.
 *  - An empty `chunks` array now resolves (digest of nothing) instead
 *    of never settling.
 *  - Removed leftover debug console.log fired on every idle period.
 *
 * NOTE(review): appendToSpark never rejects, so a FileReader failure
 * still stalls the loop — consider an onerror handler. Also,
 * deadline.timeRemaining() is re-checked after an await, by which time
 * the deadline may be stale — TODO confirm acceptable.
 *
 * @param {Array<{file: Blob}>} chunks - file chunks to hash
 * @returns {Promise<string>} hex MD5 digest
 */
async function calculateHashIdle(chunks) {
    const ctx = this ?? {};
    return new Promise(resolve => {
        const spark = new sparkMd5.ArrayBuffer();
        let count = 0;

        // Append one chunk's bytes to the incremental digest.
        const appendToSpark = file =>
            new Promise(done => {
                const reader = new FileReader();
                reader.onload = e => {
                    spark.append(e.target.result);
                    done();
                };
                reader.readAsArrayBuffer(file);
            });

        const workLoop = async deadline => {
            // Consume chunks while this frame still has idle time left.
            while (count < chunks.length && deadline.timeRemaining() > 1) {
                await appendToSpark(chunks[count].file);
                count++;
                if (count < chunks.length) {
                    ctx.hashProgress = Number(
                        ((100 * count) / chunks.length).toFixed(2)
                    );
                }
            }
            if (count >= chunks.length) {
                // All chunks digested — finish and stop rescheduling.
                ctx.hashProgress = 100;
                resolve(spark.end());
                return;
            }
            // Out of idle time with work remaining — wait for the next lull.
            window.requestIdleCallback(workLoop);
        };
        window.requestIdleCallback(workLoop);
    });
}