import SparkMD5 from 'spark-md5'

/**
 * Payload posted to this worker by the main thread.
 * `start`/`end` are a half-open range of chunk *indices* (not byte offsets):
 * the handler below iterates `for (i = start; i < end; i++)`.
 */
interface WorkerData {
  file: File
  start: number // first chunk index to process (inclusive)
  end: number // last chunk index to process (exclusive)
  CHUNK_SIZE: number // chunk size — presumably in bytes, matching File.slice; confirm with caller
}

/**
 * One hashed file chunk, as produced by `createChunks` and posted back
 * to the main thread in an array.
 */
interface ChunkData {
  start: number // byte offset of the chunk within the file (inclusive)
  end: number // byte offset one past the last byte (clamped to file.size)
  chunk: Blob // the sliced chunk data itself
  index: number // chunk index this entry was built from
  file: File // the source file (echoed back for the consumer)
  hash: string // MD5 hex digest of the chunk contents (spark-md5)
}

/**
 * Slices chunk `index` out of `file` and computes its MD5 hash.
 *
 * @param file - source file to slice
 * @param index - zero-based chunk index
 * @param chunkSize - size of each chunk in bytes (last chunk may be shorter)
 * @returns the chunk's byte range, blob, and MD5 hex digest
 * @throws rejects if the FileReader fails or yields a non-ArrayBuffer result
 */
function createChunks(file: File, index: number, chunkSize: number): Promise<ChunkData> {
  return new Promise((resolve, reject) => {
    const start = index * chunkSize
    // Clamp so the final chunk does not run past the end of the file.
    const end = Math.min(start + chunkSize, file.size)
    const blob = file.slice(start, end)
    const spark = new SparkMD5.ArrayBuffer()
    const fileReader = new FileReader()

    fileReader.onload = (e) => {
      // readAsArrayBuffer guarantees an ArrayBuffer result on success,
      // but guard anyway; reject instead of silently returning so the
      // promise always settles.
      if (!e.target || !(e.target.result instanceof ArrayBuffer)) {
        reject(new Error(`Unexpected FileReader result for chunk ${index}`))
        return
      }
      spark.append(e.target.result)

      resolve({
        start,
        end,
        chunk: blob,
        index,
        file,
        hash: spark.end(),
      })
    }
    // BUG FIX: the original never handled read failures, so a failed read
    // left this promise pending forever and hung Promise.all in onmessage.
    fileReader.onerror = () => {
      reject(fileReader.error ?? new Error(`Failed to read chunk ${index} of ${file.name}`))
    }
    fileReader.readAsArrayBuffer(blob)
  })
}

/**
 * Worker entry point: hashes every chunk in the index range [start, end)
 * in parallel and posts the resulting ChunkData array back to the caller.
 */
onmessage = async (e: MessageEvent<WorkerData>) => {
  const { file, start, end, CHUNK_SIZE } = e.data

  // Kick off all chunk reads at once; each promise resolves independently.
  const pending = Array.from(
    { length: end - start },
    (_, offset) => createChunks(file, start + offset, CHUNK_SIZE),
  )

  postMessage(await Promise.all(pending))
}
