import SparkMD5 from 'spark-md5'

const CHUNK_SIZE = 1024 * 1024 * 5

const THREAD_COUNT = navigator.hardwareConcurrency || 4

/**
 * Splits `files` into CHUNK_SIZE slices and fans the work out across a pool
 * of web workers (`./worker.ts`), resolving with the per-chunk results in
 * chunk order once every worker has replied.
 *
 * @param files - the file to cut into chunks
 * @returns a promise resolving to the array of chunk results, indexed by chunk number
 */
export const FileCut = (files: File) => {
    return new Promise((resolve) => {
        const result: unknown[] = []
        const count = Math.ceil(files.size / CHUNK_SIZE)
        // Empty file: nothing to dispatch, resolve immediately.
        // (Original spawned workers over a NaN-sized range here.)
        if (count === 0) {
            resolve(result)
            return
        }
        const workerChunkCount = Math.ceil(count / THREAD_COUNT)
        // Spawn only as many workers as have at least one chunk to process;
        // the original spawned all THREAD_COUNT even when count < THREAD_COUNT,
        // giving the extras empty or inverted ranges.
        const workerCount = Math.min(THREAD_COUNT, Math.ceil(count / workerChunkCount))
        let finish = 0
        for (let i = 0; i < workerCount; i++) {
            const worker = new Worker(new URL('./worker.ts', import.meta.url), {
                type: 'module'
            })
            const startIndex = i * workerChunkCount
            // Bug fix: original computed `(startIndex + 1) + workerChunkCount`,
            // making each worker's range overlap the next worker's first chunk.
            const endIndex = Math.min(startIndex + workerChunkCount, count)
            worker.postMessage({
                files,
                CHUNK_SIZE,
                startIndex,
                endIndex
            })
            worker.onmessage = (e) => {
                // `j` instead of re-declaring `i`, which shadowed the loop variable.
                for (let j = startIndex; j < endIndex; j++) {
                    result[j] = e.data[j - startIndex]
                }
                worker.terminate()
                finish++
                if (finish === workerCount) {
                    resolve(result)
                }
            }
        }
    })
}

/**
 * Reads one CHUNK_SIZE-sized slice of `files` and computes its MD5 hash.
 *
 * @param files - the source file
 * @param index - zero-based chunk index
 * @param size  - chunk size in bytes
 * @returns a promise resolving to { start, end, index, hash, blob } where
 *          `hash` is the hex MD5 of the slice and `blob` is the slice itself
 */
export const FileChunk = (files: File, index: number, size: number) => {
    // Note: the promise never settles if the read errors; original had the
    // same behavior (no onerror handler) and callers only pass a resolve path.
    return new Promise((resolve) => {
        const start = index * size
        // Clamp the final chunk to the end of the file.
        const end = Math.min(start + size, files.size)
        const blob = files.slice(start, end)
        const spark = new SparkMD5.ArrayBuffer()
        const fileReader = new FileReader()
        fileReader.onload = (e) => {
            const bytes = e.target?.result as ArrayBuffer
            spark.append(bytes)
            resolve({
                start,
                end,
                index,
                hash: spark.end(),
                blob
            })
        }
        // Bug fix: original sliced the file a second time here instead of
        // reusing the `blob` it had already created (and returns).
        fileReader.readAsArrayBuffer(blob)
    })
}

/**
 * Computes the MD5 hash of an entire file in one read.
 *
 * @param files - the file to hash
 * @returns a promise resolving to the hex MD5 digest string
 */
export const FileHash = (files: File) => {
    return new Promise((resolve) => {
        const reader = new FileReader()
        reader.onload = (event) => {
            const buffer = event.target?.result as ArrayBuffer
            const hasher = new SparkMD5.ArrayBuffer()
            hasher.append(buffer)
            resolve(hasher.end())
        }
        reader.readAsArrayBuffer(files)
    })
}


/**
 * Formats a byte count as a human-readable string with two decimal places
 * ('2.00 GB', '5.00 MB', '1.50 KB') or plain bytes below 1 KiB.
 *
 * @param byteSize - size in bytes
 * @returns formatted size string
 */
export const FileSize = (byteSize: number) => {
    const units: Array<[number, string]> = [
        [1024 ** 3, ' GB'],
        [1024 ** 2, ' MB'],
        [1024, ' KB'],
    ];
    for (const [threshold, suffix] of units) {
        if (byteSize >= threshold) {
            return (byteSize / threshold).toFixed(2) + suffix;
        }
    }
    // Falsy inputs (0, NaN) render as '0 bytes', matching the original.
    return (byteSize || 0) + ' bytes';
}
