const chunkSize = 1024 * 1024 * 10   // size of each upload chunk: 10 MiB
import apiFile from "@/api/apiFile";
import { useConuntStore } from "@/store/home";
/**
 * Slice a file into fixed-size chunks, hash/upload them in parallel via Web
 * Workers (`./worker.ts`), then ask the server to merge the chunks.
 *
 * Progress is reported by accumulating each worker's result into
 * `countstore.processValue` (reset to 0 at the start of every call).
 *
 * @param file - the file to split and upload
 * @returns resolves with the `apiFile.MergeFile` response once every worker
 *          has reported back; rejects if any worker raises an error (the
 *          original implementation hung forever in that case).
 */
const sliceFile = async (file: File): Promise<unknown> => {
    return new Promise((resolve, reject) => {
        const systemCount = navigator.hardwareConcurrency || 4
        const size = file.size
        // Total number of chunks the file splits into.
        const chunks = Math.ceil(size / chunkSize)
        const countstore = useConuntStore();
        countstore.processValue = 0

        // Ask the server to stitch the uploaded chunks back together,
        // settling the outer promise with the API response.
        const mergeChunks = () => {
            const data = {
                fileName: file.name,
                totalChunks: chunks
            }
            apiFile.MergeFile(data).then(resolve, reject)
        }

        // Empty file: nothing to slice, request the merge straight away.
        // (Previously `count == systemCount` could never be reached here.)
        if (chunks === 0) {
            mergeChunks()
            return
        }

        // Never spawn more workers than there are chunks to process;
        // otherwise surplus workers get empty [start, end) ranges.
        const workerCount = Math.min(systemCount, chunks)
        // Number of chunks each worker is responsible for.
        const threadCount = Math.ceil(chunks / workerCount)
        let finished = 0
        let failed = false

        for (let i = 0; i < workerCount; i++) {
            const worker = new Worker(new URL('./worker.ts', import.meta.url), { type: 'module' })
            const start = i * threadCount
            // Clamp so the last worker never reads past the final chunk.
            const end = Math.min(start + threadCount, chunks)
            worker.postMessage({
                file,
                start,
                end,
                chunkSize,
                chunks,
                processValue: countstore.processValue
            })
            worker.onmessage = (e) => {
                worker.terminate()
                if (failed) return // a sibling worker already rejected
                countstore.processValue += e.data
                finished++
                // All workers done: merge the chunks server-side.
                if (finished === workerCount) {
                    mergeChunks()
                }
            }
            worker.onerror = (err) => {
                console.error('Worker error:', err)
                worker.terminate()
                // Reject exactly once so the caller is never left hanging.
                if (!failed) {
                    failed = true
                    reject(err)
                }
            }
        }
    })
}

export { sliceFile }