// NOTE: importScripts() exists only inside worker scopes — calling it here
// (main thread, module top level) throws a ReferenceError and breaks this
// module on load. Likewise, an ES `import SparkMD5 from 'spark-md5'` is of no
// use to the worker: the worker executes only the stringified body of
// `workercode` and cannot see this module's scope. SparkMD5 must be loaded
// inside the worker body itself (e.g. via importScripts there).
// Body of the Web Worker. It is serialized with toString() below and run from
// a blob URL, so EVERYTHING the worker needs must be defined inside this
// function — it cannot see any name from this module's scope.
const workercode = () => {
  // Load SparkMD5 into the worker's global scope.
  // NOTE(review): the path resolves against the page origin — confirm that
  // './spark-md5.min.js' is actually served from the app root.
  importScripts('./spark-md5.min.js')

  // Split `file` (a File/Blob) into slices of `chunkSize` bytes each.
  const createChunk = (file, chunkSize) => {
    const results = []
    for (let i = 0; i < file?.size; i += chunkSize) {
      results.push(file.slice(i, i + chunkSize))
    }
    return results
  }

  // Compute an incremental MD5 over all chunks, reading them sequentially
  // so only one chunk is held in memory at a time. Resolves with the hex digest.
  const hash = chunks =>
    new Promise(resolve => {
      // ArrayBuffer variant: plain SparkMD5.append expects strings, while the
      // FileReader below produces ArrayBuffers.
      const spark = new SparkMD5.ArrayBuffer()
      const readFile = i => {
        if (i >= chunks.length) {
          // All chunks consumed — finalize and return the digest.
          resolve(spark.end())
          return
        }
        const reader = new FileReader()
        reader.onload = e => {
          spark.append(e.target.result)
          readFile(i + 1)
        }
        reader.readAsArrayBuffer(chunks[i])
      }
      readFile(0)
    })

  onmessage = async event => {
    const { file, chunkSize } = event.data
    // Slice the file, hash the slices, then post both back to the main thread.
    const chunks = createChunk(file, chunkSize)
    const fileHash = await hash(chunks)
    postMessage({
      fileHash,
      chunks
    })
  }
}

// Serialize the worker function and keep only its body: everything between
// the first '{' and the last '}' of its source text.
const source = workercode.toString()
const body = source.slice(source.indexOf('{') + 1, source.lastIndexOf('}'))

// Wrap the body in a blob and expose it as an object URL, suitable for
// `new Worker(worker_script)` on the consumer side.
const workerBlob = new Blob([body], { type: 'application/javascript' })
const worker_script = URL.createObjectURL(workerBlob)

export default worker_script

// --- Legacy non-worker implementation (file slicing + hashing), kept
// commented out for reference ---

// import SparkMD5 from 'spark-md5'
// const createChunk = (file, chunkSize) => {
//   const results = []
//   for (let i = 0; i < file?.size; i += chunkSize) {
//     results.push(file.slice(i, i + chunkSize))
//   }
//   return results
// }

// //哈希命名
// const hash = chunks => {
//   return new Promise(resolve => {
//     const spark = new SparkMD5()
//     const readFile = async i => {
//       //读取完成,返回结果
//       if (i >= chunks.length) {
//         resolve(spark.end())
//         return
//       }

//       const blob = chunks[i]
//       const reader = new FileReader()
//       reader.onload = e => {
//         //读取到的字节数
//         const bytes = e.target.result
//         spark.append(bytes)
//         readFile(i + 1)
//       }
//       reader.readAsArrayBuffer(blob)
//     }
//     readFile(0)
//   })
// }

// onmessage = async event => {
//   let { file, chunkSize } = event.data
//   //文件切片
//   const chunks = createChunk(file, chunkSize)
//   //计算hash
//   const fileHash = await hash(chunks)

//   postMessage({
//     fileHash,
//     chunks: chunks
//   })
// }
