// var SparkMD5 = require('spark-md5')
import SparkMD5 from 'spark-md5'
const CHUNK_SIZE = 1024 * 1024 * 5 //切片大小5M
//切片
/**
 * Split `file` into CHUNK_SIZE-byte chunks and hash each one in parallel.
 *
 * @param {File|Blob} file - source file to split
 * @returns {Promise<Array<{start:number, end:number, i:number, hash:string, chunk:Blob}>>}
 *   chunk descriptors in index order; rejects if any chunk read fails
 */
async function cutFile(file) {
  // Number of chunks needed to cover the whole file (0 for an empty file).
  const chunkCount = Math.ceil(file.size / CHUNK_SIZE)
  const chunkPromises = []
  for (let i = 0; i < chunkCount; i++) {
    chunkPromises.push(createChunk(file, i, CHUNK_SIZE))
  }
  // All chunks are read concurrently; Promise.all preserves index order
  // and fails fast on the first read error.
  return Promise.all(chunkPromises)
}

//
/**
 * Read chunk `i` of `file` and compute the chunk's MD5 hash.
 *
 * @param {File|Blob} file - source file
 * @param {number} i - zero-based chunk index
 * @param {number} [chunkSize=CHUNK_SIZE] - chunk length in bytes
 * @returns {Promise<{start:number, end:number, i:number, hash:string, chunk:Blob}>}
 *   rejects with the FileReader error if the read fails
 */
function createChunk(file, i, chunkSize = CHUNK_SIZE) {
  return new Promise((resolve, reject) => {
    const start = i * chunkSize
    // Clamp so the reported `end` matches the actual blob length for the
    // final (possibly short) chunk; Blob.slice already clamps internally.
    const end = Math.min(start + chunkSize, file.size)
    const blob = file.slice(start, end)
    const spark = new SparkMD5.ArrayBuffer()
    const fileReader = new FileReader()
    fileReader.onload = (e) => {
      spark.append(e.target.result)
      resolve({
        start,
        end,
        i,
        hash: spark.end(),
        chunk: blob
      })
    }
    // Without these handlers the promise would never settle on a failed
    // or aborted read, hanging cutFile's Promise.all forever.
    fileReader.onerror = () =>
      reject(fileReader.error ?? new Error(`Failed to read chunk ${i}`))
    fileReader.onabort = () =>
      reject(new Error(`Read of chunk ${i} was aborted`))
    fileReader.readAsArrayBuffer(blob)
  })
}

// // 创建切片
// function createChunk(file, i, CHUNK_SIZE) {
//   const chunk = file.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE)
//   const chunkFileName = `${file.name}-${i}`
//   return {
//     chunk,
//     chunkFileName
//   }
// }
// //计算md5
// function calculateMd5(file) {
//   return new Promise((resolve, reject) => {
//     const fileReader = new FileReader()
//     fileReader.readAsArrayBuffer(file)
//     fileReader.onload = (e) => {
//       const spark = new SparkMD5.ArrayBuffer()
//       spark.append(e.target.result)
//       resolve(spark.end())
//     }
//   })
// }
export { cutFile }
