import SparkMD5 from 'spark-md5';

/**
 * Compute the MD5 hash of a file's full contents.
 *
 * @param file - the file to hash (read once, in full, via FileReader)
 * @returns hex-encoded MD5 digest of the file's bytes
 * @throws rejects if the file read fails or does not yield an ArrayBuffer
 */
export const calculateHash = async (file: File): Promise<string> => {
    return new Promise((resolve, reject) => {
        const spark = new SparkMD5.ArrayBuffer();
        const reader = new FileReader();
        // Attach handlers before starting the read so no event can be missed.
        reader.onload = (e) => {
            const result = e.target?.result;
            // Guard instead of a blind `as ArrayBuffer` cast: readAsArrayBuffer
            // should always produce an ArrayBuffer, but fail loudly if not.
            if (result instanceof ArrayBuffer) {
                spark.append(result);
                resolve(spark.end());
            } else {
                reject(new Error('FileReader did not produce an ArrayBuffer'));
            }
        };
        // Without this, a failed read would leave the promise pending forever.
        reader.onerror = () => {
            reject(reader.error ?? new Error('Failed to read file'));
        };
        reader.readAsArrayBuffer(file);
    });
};

/**
 * Split a file into fixed-size chunks for upload.
 *
 * @param file - the file to partition
 * @param chunkSize - bytes per chunk (default 5 MiB); the last chunk may be smaller
 * @returns an array of Blob slices covering the whole file (empty for an empty file)
 */
export const createFileChunks = (file: File, chunkSize: number = 5 * 1024 * 1024): Blob[] => {
    // Blob.slice clamps the end offset, so the final chunk is simply shorter.
    const chunkCount = Math.ceil(file.size / chunkSize);
    return Array.from({ length: chunkCount }, (_, index) =>
        file.slice(index * chunkSize, (index + 1) * chunkSize),
    );
};