import SparkMD5 from "spark-md5";
/**
 * Read one chunk of `fileitem.file` and compute its MD5 hash.
 *
 * @param {{ file: Blob }} fileitem - wrapper holding the File/Blob plus any extra metadata
 * @param {number} index - zero-based chunk index
 * @param {number} chunkSize - chunk size in bytes
 * @returns {Promise<{items: Object, start: number, end: number, index: number, MD5: string, blob: Blob}>}
 *   Resolves with the chunk descriptor once the slice has been read and hashed;
 *   rejects if reading the slice fails.
 */
export function createChunk(fileitem, index, chunkSize) {
    const { file, ...items } = fileitem;
    return new Promise((resolve, reject) => {
        const start = index * chunkSize;
        const end = start + chunkSize; // Blob.slice clamps end past EOF, so no bounds check needed
        const spark = new SparkMD5.ArrayBuffer();
        const fileReader = new FileReader();
        const blob = file.slice(start, end);
        fileReader.onload = (e) => {
            spark.append(e.target.result);
            resolve({
                items,
                start,
                end,
                index,
                MD5: spark.end(),
                blob,
            });
        };
        // Bug fix: the original never called reject, so a failed read left
        // the Promise pending forever.
        fileReader.onerror = () => {
            reject(new Error(`Failed to read chunk ${index}`, { cause: fileReader.error }));
        };
        fileReader.readAsArrayBuffer(blob);
    });
}
/**
 * Split `fileitem.file` into 5 MB chunks and hash them in parallel across
 * Web Workers. Each chunk is handed to `uploadChunk` as soon as its hash is
 * ready; the Promise resolves once every chunk has been processed.
 *
 * @param {{ file: File }} fileitem - wrapper holding the File to split
 * @returns {Promise<Array>} resolves with all chunk descriptors, indexed by
 *   chunk index; rejects if any worker errors.
 */
export function cutFile(fileitem) {
    const { file } = fileitem;
    const chunkSize = 1024 * 1024 * 5; // 5 MB per chunk
    const chunks = Math.ceil(file.size / chunkSize);
    return new Promise((resolve, reject) => {
        // Bug fix: an empty file yields 0 chunks, so the loop below never ran
        // and `finishCount === chunks` was never reached — the Promise hung.
        if (chunks === 0) {
            resolve([]);
            return;
        }
        const THREAD_COUNT = Math.min(navigator.hardwareConcurrency || 5, chunks);
        // chunks per thread = ceil(total chunks / thread count)
        const threadChunkCount = Math.ceil(chunks / THREAD_COUNT);
        const result = [];
        let finishCount = 0;
        const workers = [];
        const terminateAll = () => workers.forEach((w) => w.terminate());
        for (let i = 0; i < THREAD_COUNT; i++) {
            const start = i * threadChunkCount;
            // Bug fix: when threadChunkCount rounds up, later ranges can start
            // past the last chunk; skip spawning workers with nothing to do.
            if (start >= chunks) break;
            const end = Math.min((i + 1) * threadChunkCount, chunks);
            const worker = new Worker(new URL('./worker/index.js', import.meta.url), {
                type: "module",
            });
            workers.push(worker);
            worker.postMessage({
                fileitem,
                file,
                chunkSize,
                StartchunkIndex: start,
                EndchunkIndex: end,
            });
            // Worker posts one message per finished chunk.
            worker.onmessage = (e) => {
                const chunk = e.data;
                result[chunk.index] = chunk;
                // Upload each chunk immediately instead of waiting for all hashes.
                uploadChunk(chunk);
                finishCount++;
                if (finishCount === chunks) {
                    terminateAll();
                    resolve(result);
                }
            };
            // Bug fix: a worker failure previously left the Promise pending
            // forever; reject and tear down every worker instead.
            worker.onerror = (err) => {
                terminateAll();
                reject(err);
            };
        }
    });
}
// Placeholder: upload a single chunk descriptor to the server.
// TODO: implement the actual upload request. `cutFile` calls this once per
// hashed chunk, fire-and-forget — its result is not awaited or checked.
const uploadChunk = (chunk) => {

}
