// import { createChunk } from '@/utils/createChunk'
import SparkMD5 from 'spark-md5'

export default () => {
  /**
   * Read one slice of `file` and compute its MD5 digest.
   *
   * @param {Blob} file - source blob; sliced with Blob.prototype.slice
   * @param {number} index - zero-based chunk index
   * @param {number} chunkSize - chunk size in bytes
   * @returns {Promise<{start: number, end: number, index: number, hash: string}>}
   *   Rejects if the read fails (previously the promise never settled,
   *   so the awaiting Promise.all hung silently).
   */
  const createChunk = (file, index, chunkSize) => {
    return new Promise((resolve, reject) => {
      const start = index * chunkSize
      // Clamp so the final chunk's reported range doesn't overrun the file;
      // Blob.slice already clamps the data, but the metadata said otherwise.
      const end = Math.min(start + chunkSize, file.size)
      const spark = new SparkMD5.ArrayBuffer()
      const reader = new FileReader()
      reader.onload = (e) => {
        spark.append(e.target.result)
        resolve({
          start,
          end,
          index,
          hash: spark.end(),
        })
      }
      // Bug fix: settle the promise on read failure instead of hanging forever.
      reader.onerror = () =>
        reject(reader.error ?? new Error(`chunk ${index}: FileReader failed`))
      reader.readAsArrayBuffer(file.slice(start, end))
    })
  }

  /**
   * Worker entry point. Expects a JSON string payload of the shape
   * `{ file, CHUNK_SIZE, startIndex, endIndex }`, hashes chunks
   * [startIndex, endIndex) in parallel and posts the resulting array
   * of { start, end, index, hash } records back to the main thread.
   *
   * NOTE(review): a File/Blob does not survive JSON.stringify (it becomes {}),
   * so `file` here can only work if the caller transfers it some other way —
   * confirm the message protocol against the main-thread sender.
   */
  onmessage = async (e) => {
    const raw = e?.data
    // Ignore empty or sentinel payloads ('' / '[]').
    if (typeof raw !== 'string' || raw.length === 0 || raw === '[]') return
    // Parse once; the original parsed the same payload twice.
    const { file, CHUNK_SIZE, startIndex, endIndex } = JSON.parse(raw)
    const tasks = []
    for (let i = startIndex; i < endIndex; i++) {
      tasks.push(createChunk(file, i, CHUNK_SIZE))
    }
    // Fail-fast: any single read error rejects the whole batch.
    const chunks = await Promise.all(tasks)
    postMessage(chunks)
  }
}
