import SparkMD5 from 'spark-md5';
import useThreadWorker, { MAX_THREAD_COUNT } from './useThreadWorker';

/** Default chunk size: 10 MiB. */
const CHUNK_SIZE = 1024 * 1024 * 10;

/** Metadata describing one slice of a file produced by the chunker. */
type FileChunk = {
  /** Byte offset (inclusive) of the chunk within the file. */
  start: number;
  /** Byte offset (exclusive) of the chunk within the file. */
  end: number;
  /** Zero-based index of the chunk. */
  index: number;
  /** MD5 digest of the chunk contents (hex string from SparkMD5). */
  hash: string;
  /** The sliced binary data for this chunk. */
  blob: Blob;
};

/**
 * Split a file into hashed chunks and hand them to an upload callback.
 *
 * @param file          the file to split — TODO(review): tighten `any` to `File` once callers are audited
 * @param uploadMethods optional callback receiving the full chunk list for concurrent upload
 * @param chunkSize     size of each chunk in bytes (default 10 MiB)
 * @returns the chunk list, so callers can also consume it directly
 */
export default async function (file: any, uploadMethods?: (fileList: FileChunk[]) => void, chunkSize: number = CHUNK_SIZE): Promise<FileChunk[]> {
  // Split the file into hashed chunks (work is done on worker threads).
  const fileList = await cutFile(file, chunkSize);
  // Delegate uploading to the caller-supplied strategy when present.
  if (uploadMethods) {
    uploadMethods(fileList);
  }
  // NOTE(review): the original had an empty else-branch placeholder for a
  // "default upload"; no such implementation exists. Returning the chunk
  // list (previously discarded) lets callers drive the upload themselves.
  return fileList;
}

/**
 * Split a file into hashed chunks using multiple worker threads.
 *
 * The chunk index space [0, chunkCount) is partitioned evenly across
 * MAX_THREAD_COUNT workers; each worker hashes its slice of chunks and
 * posts the results back, which are merged in order into one array.
 *
 * @param file      the file to split (must expose `size`) — TODO(review): tighten `any` once callers are audited
 * @param chunkSize size of each chunk in bytes (default 10 MiB)
 * @returns all chunks, ordered by global chunk index
 */
export async function cutFile(file: any, chunkSize: number = CHUNK_SIZE): Promise<FileChunk[]> {
  return new Promise((resolve) => {
    // Total number of chunks in the file.
    const chunkCount = Math.ceil(file.size / chunkSize);
    // Number of chunks assigned to each worker thread.
    const threadChunkCount = Math.ceil(chunkCount / MAX_THREAD_COUNT);
    // Merged results, indexed by global chunk index.
    const result: FileChunk[] = [];
    // How many workers have reported back so far.
    let finishCount = 0;

    // [start, end) range of global chunk indices handled by worker `index`.
    // Extracted into one helper so postMessage/onMessage cannot drift apart
    // (the computation was previously duplicated verbatim in both).
    function chunkRange(index: number): [number, number] {
      const start = index * threadChunkCount;
      const end = Math.min(chunkCount, (index + 1) * threadChunkCount);
      return [start, end];
    }

    // Build the message payload sent to worker `index`.
    function postMessage(worker: Worker, index: number): any {
      const [startChunkIndex, endChunkIndex] = chunkRange(index);
      return {
        file,
        chunkSize,
        startChunkIndex,
        endChunkIndex
      };
    }

    // Build the handler that collects chunk results from worker `index`.
    function onMessage(worker: Worker, index: number) {
      const [start, end] = chunkRange(index);
      return (e: MessageEvent<any>) => {
        // Copy this worker's results into their global positions.
        for (let i = start; i < end; i++) {
          result[i] = e.data[i - start];
        }
        worker.terminate();
        finishCount++;
        // Resolve once every worker has reported.
        if (finishCount === MAX_THREAD_COUNT) {
          resolve(result);
        }
      };
    }

    useThreadWorker("../workers/createChunkWorker.ts", postMessage, onMessage);
  });
}

/**
 * Create a single hashed chunk of a file.
 *
 * @param file      the source file
 * @param index     zero-based chunk index
 * @param chunkSize chunk size in bytes (default 10 MiB)
 * @returns a FileChunk holding the slice's byte range, index, MD5 hash and blob
 * @throws rejects if the read yields no result or the FileReader errors
 */
export function createChunk(file: File, index: number, chunkSize: number = CHUNK_SIZE): Promise<FileChunk> {
  return new Promise((resolve, reject) => {
    const start = index * chunkSize;
    // Clamp the end of the final chunk to the file size.
    const end = Math.min(file.size, start + chunkSize);
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    const blob = file.slice(start, end);
    fileReader.onload = (e) => {
      if (!e || !e.target || e.target.result == null) {
        // BUG FIX: the original threw here; a throw inside an async callback
        // escapes the promise executor, leaving the promise pending forever.
        // Reject instead so callers observe the failure.
        reject(new Error("e或e.target是空，请检查数据！"));
        return;
      }
      spark.append(e.target.result as ArrayBuffer);
      resolve({
        start,
        end,
        index,
        hash: spark.end(),
        blob
      });
    };
    // BUG FIX: the original registered no error handler, so a failed read
    // left the promise pending forever.
    fileReader.onerror = () => {
      reject(fileReader.error ?? new Error("FileReader failed while reading chunk."));
    };
    fileReader.readAsArrayBuffer(blob);
  });
}