/**
 * File / string MD5 hashing helpers with a Promise-based calling style.
 * Backed by spark-md5; heavy work can be offloaded to a Web Worker
 * (see HashWorker below).
 */
import SparkMD5 from 'spark-md5'

import type { FileObjType } from '../../types/index.ts'
import { FormatFileSize } from '../../utils/index.ts'
import { Message } from '../../hooks/Message.ts'

/**
 * Compute the MD5 hash of a whole file on the main thread.
 * Blocks the page while the buffer is hashed — use HashWorker for large files.
 * @param file - the file to hash
 * @returns a Promise resolving to the hex MD5 digest string;
 *          rejects with the FileReader error event on read failure
 */
const QuickFileHash = (file: File): Promise<string> => {
  return new Promise<string>((resolve, reject) => {
    const reader = new FileReader()

    // Attach handlers BEFORE starting the read so no event can be missed.
    reader.onload = () => {
      const hash = new SparkMD5.ArrayBuffer()
      // readAsArrayBuffer guarantees `result` is an ArrayBuffer on load
      hash.append(reader.result as ArrayBuffer)
      resolve(hash.end())
    }
    reader.onerror = reject

    reader.readAsArrayBuffer(file)
  })
}
/**
 * Compute file hashes in a Web Worker so the page is not blocked.
 * Processes `fileList` sequentially (one worker per file), skipping files
 * already marked 'success'. On each completed hash it writes `fileHash` /
 * `chunkHash` back onto the file and its chunk list, and fires the
 * 'onFileHashed' hook with a small timing log.
 *
 * Bug fixed vs. the original: completion was only detected inside the
 * worker-success branch, so a trailing already-uploaded file (or an empty
 * list) advanced `count` past the end — `fileList[count]` was `undefined`,
 * crashing instead of resolving. The bounds check now lives at the top of
 * the loop step.
 *
 * @param fileList - files to hash (mutated in place with hash results)
 * @param Hooks - hook bus; `executeHook('onFileHashed', log)` is invoked per file
 * @param LogService - accepted but unused in this block — TODO confirm caller intent
 * @returns a Promise resolving to `true` when every file is processed;
 *          rejects on a worker error or a synchronous failure
 */
const HashWorker = (fileList: FileObjType[], Hooks: any, LogService: any): Promise<boolean> => {
  return new Promise<boolean>((resolve, reject) => {
    let count = 0

    // No await inside — plain function is enough (original was needlessly async).
    function runFun() {
      // All entries processed: also covers an empty list and trailing
      // already-uploaded files, which previously crashed / never resolved.
      if (count >= fileList.length) {
        resolve(true)
        return
      }

      const file = fileList[count]

      // 跳过已上传的文件 — skip files already uploaded
      if (file.fileStatus == 'success') {
        count++
        runFun()
        return
      }

      file.fileStatus = 'checkMD5'

      // Wrap each chunk so it can be posted to the worker as a Blob.
      const blobArray = file.chunks.map((i) => new Blob([i]))

      const startTime = Date.now()

      // Load the worker script as an ES module.
      const hashWorker = new Worker(new URL('./hash-worker.js', import.meta.url), {
        type: 'module',
      })
      // Hand the file source and its chunk blobs to the worker.
      hashWorker.postMessage({ file: file.fileSource, chunks: blobArray })

      hashWorker.onmessage = (e) => {
        if (e.data.type === 'success') {
          // Elapsed time in seconds.
          const duration = (Date.now() - startTime) / 1000

          const hashLog = {
            fileName: file.fileName,
            fileSize: FormatFileSize(file.fileSize),
            chunkCount: file.chunks.length,
            duration,
          }

          // 触发钩子 — notify listeners that this file has been hashed
          Hooks?.executeHook('onFileHashed', hashLog)

          hashWorker.terminate() // done with this file — free the worker

          // Write the results back onto the file object and its chunks.
          file.fileHash = e.data.fileHash
          file.chunkList.forEach((item: any, index: number) => {
            item.chunkHash = e.data.chunksHash[index]
            item.fileHash = e.data.fileHash
          })

          count++
          runFun()
        }
      }

      hashWorker.onerror = (error) => {
        hashWorker.terminate() // abort: kill the worker before rejecting
        reject(error)
      }
    }

    try {
      runFun()
    } catch (error) {
      reject(error)
    }
  })
}
/**
 * Compute the MD5 hash of a string.
 * @param str - the input string
 * @returns a Promise resolving to the hex MD5 digest
 */
const QuickStringHash = (str: string): Promise<string> => {
  // SparkMD5.hash is synchronous; the Promise wrapper is kept only so the
  // calling style matches QuickFileHash. The original used the Promise
  // constructor with an unused `reject` — an anti-pattern; Promise.resolve
  // expresses the same thing directly.
  return Promise.resolve(SparkMD5.hash(str))
}

// Default export bundles the hashing helpers, e.g. `FileHash.QuickFileHash(file)`.
export default { QuickFileHash, QuickStringHash, HashWorker }
