/*
 * @Author: 天月将白 7749910+lht330@user.noreply.gitee.com
 * @Date: 2024-04-08 11:42:17
 * @LastEditors: 天月将白 7749910+lht330@user.noreply.gitee.com
 * @LastEditTime: 2024-04-17 16:20:10
 * @FilePath: \demo\src\utils\createFileChunk.js
 * @Description: 这是默认设置,请设置`customMade`, 打开koroFileHeader查看配置 进行设置: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE
 */
import SparkMD5 from 'spark-md5';

// Default size of a single file chunk: 5 MiB.
export const CHUNK_SIZE = 5 * 1024 * 1024;

// File-chunk hashing helper.
/**
 * @description: Slice one chunk out of a File and compute its MD5 hash.
 * @param {File} file - source file object
 * @param {number} index - zero-based index of the chunk to process
 * @param {number} chunkSize - size in bytes of a single chunk
 * @return {Promise<{start:number,end:number,index:number,hash:string,blob:Blob}>}
 *         Resolves with the chunk's byte range, index, MD5 hash and Blob;
 *         rejects with the reader's error if the chunk cannot be read.
 */
export const createFileChunk = function (file, index = 0, chunkSize = CHUNK_SIZE) {
    return new Promise((resolve, reject) => {
        const start = index * chunkSize;
        // Clamp so the reported range of the last chunk never exceeds the
        // file size (Blob.slice clamps internally, but callers see `end`).
        const end = Math.min(start + chunkSize, file.size);
        // Slice the chunk out of the file.
        const blob = file.slice(start, end);
        const spark = new SparkMD5.ArrayBuffer();
        const fileReader = new FileReader();
        // Read the chunk and feed it to the incremental MD5 hasher.
        fileReader.onload = e => {
            spark.append(e.target.result);
            resolve({ start, end, index, hash: spark.end(), blob });
        };
        // Without this handler the Promise would hang forever on a read failure.
        fileReader.onerror = () => reject(fileReader.error);
        fileReader.readAsArrayBuffer(blob);
    });
};
