
import sparkMd5 from "spark-md5";
class FileUtil {
  /** Default chunk size used by {@link getFileHash}: 5 MiB. */
  static chunkSize = 1024 * 1024 * 5;

  /**
   * Split a file into fixed-size chunks.
   *
   * @param file - Source file to slice.
   * @param chunkSize - Size of each chunk in bytes (last chunk may be smaller).
   * @param begin - Byte offset to start slicing from (default 0).
   * @returns Chunks paired with their byte offset (`index`) within the file.
   */
  static createChunks(file: File, chunkSize: number, begin: number = 0) {
    const result: {
      partFile: Blob;
      index: number;
    }[] = [];
    for (let i = begin; i < file.size; i += chunkSize) {
      result.push({
        // Blob.slice clamps the end, so the final chunk is simply shorter.
        partFile: file.slice(i, i + chunkSize),
        index: i,
      });
    }

    return result;
  }

  /**
   * Compute the MD5 hash of a file incrementally, 5 MiB at a time, so large
   * files never need to be fully resident in memory at once.
   *
   * @param file - File to hash.
   * @returns Hex-encoded MD5 digest of the full file contents.
   */
  static async getFileHash(file: File): Promise<string> {
    const chunks = this.createChunks(file, this.chunkSize);
    const spark = new sparkMd5.ArrayBuffer();
    // Chunks must be appended in file order for the digest to be correct,
    // so the reads are intentionally sequential.
    for (const item of chunks) {
      spark.append(await this.getBlobArrayBuffer(item.partFile));
    }
    return spark.end();
  }

  /**
   * Read a Blob into an ArrayBuffer.
   *
   * @param chunk - Blob to read.
   * @returns Promise resolving with the blob's bytes; rejects on read failure.
   */
  private static getBlobArrayBuffer(chunk: Blob): Promise<ArrayBuffer> {
    return new Promise((resolve, reject) => {
      const fileReader = new FileReader();

      // Use onload (success only) rather than onloadend: onloadend also fires
      // after an error/abort and would resolve with a null result. Wire up
      // onerror so a failed read rejects instead of hanging forever.
      fileReader.onload = () => {
        resolve(fileReader.result as ArrayBuffer);
      };
      fileReader.onerror = () => {
        reject(fileReader.error ?? new Error("Failed to read blob"));
      };
      fileReader.readAsArrayBuffer(chunk);
    });
  }

  /**
   * Format a byte count as a human-readable size string, e.g. "1.5 MB".
   * Handles negatives, fractional bytes, and values beyond YB (the previous
   * version produced NaN / `undefined` units for those inputs).
   *
   * @param bytes - Size in bytes.
   * @returns Size with one decimal place and a binary-scaled unit.
   */
  public static formatFileSize(bytes: number): string {
    if (bytes === 0) return "0 B";

    const k = 1024; // 1 KB = 1024 bytes
    const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];

    const sign = bytes < 0 ? "-" : "";
    const abs = Math.abs(bytes);

    // Clamp the exponent: sub-1-byte values stay in "B" (log would go
    // negative) and astronomically large values cap at the last unit.
    const i = Math.min(
      Math.max(Math.floor(Math.log(abs) / Math.log(k)), 0),
      sizes.length - 1,
    );
    const size = abs / Math.pow(k, i);

    return `${sign}${size.toFixed(1)} ${sizes[i]}`;
  }

  /**
   * Return the directory portion of a path (everything before the last
   * separator). Accepts both Windows "\" and POSIX "/" separators; returns
   * "" when the path contains no separator (matching the prior behavior
   * for separator-less Windows paths).
   *
   * @param path - File path to strip.
   */
  public static getFileDir(path: string): string {
    const cut = Math.max(path.lastIndexOf("\\"), path.lastIndexOf("/"));
    return path.substring(0, Math.max(cut, 0));
  }
}
export default FileUtil;
