import {useUserStore} from "~/store/modulers/user";
import {storeToRefs} from "pinia";
import {BaseApi} from "~/icesoft/utils/service/BaseApi";
import SparkMD5 from "spark-md5";

// Slice size used for both MD5 hashing and chunked uploads: 1 MiB per chunk.
const MAXSIZE = 1024 * 1024;
// Endpoint for single-request image uploads.
const img_action = '/file/img/upload';
// Endpoint for chunked (large) file uploads.
const action = '/file/upload';

/** Shape returned by {@link FileApi.fileMd5}: whole-file digest plus per-slice digests. */

export class FileApi extends BaseApi<any> {

    /**
     * Uploads an image in a single request.
     *
     * @param file     image file selected by the user
     * @param isPublic whether the server should mark the file publicly readable
     * @returns the HTTP response of the upload request
     */
    async onImageFile(file: File, isPublic: boolean = true) {
        // Whole-file MD5 lets the server verify / deduplicate the upload.
        const md5 = await this.fileMd5(file);

        const formData = new FormData();
        formData.set("file", file);
        formData.set("md5", md5.md5);
        formData.set("isPublic", String(isPublic));
        formData.set("fileName", file.name);
        // NOTE(review): unlike onBigFile, no Authorization header is sent here —
        // the original fetched the token but never used it. Confirm this endpoint
        // really is unauthenticated before adding one.
        return await this.http.post(img_action, formData);
    }

    /**
     * Uploads a large file in MAXSIZE-byte chunks, sequentially and in order
     * (the server reassembles by chunkNumber/totalChunks).
     *
     * @param file file to upload
     * @returns the HTTP response of the FINAL chunk's request, or undefined
     *          for an empty file (no chunks to send)
     */
    async onBigFile(file: File) {
        const userStore = useUserStore();
        const {token} = storeToRefs(userStore);
        const chunks = Math.ceil(file.size / MAXSIZE);

        // Hash the whole file up front so every chunk carries the same file MD5.
        const md5 = await this.fileMd5(file);

        for (let chunkIndex = 0; chunkIndex < chunks; chunkIndex++) {
            const start = chunkIndex * MAXSIZE;
            const end = Math.min(start + MAXSIZE, file.size);
            const chunk = file.slice(start, end);

            const formData = new FormData();
            formData.set("file", chunk);
            formData.set("md5", md5.md5);
            // chunkNumber is 1-based on the wire; FormData values are strings.
            formData.set("chunkNumber", String(chunkIndex + 1));
            formData.set("totalChunks", String(chunks));
            formData.set("fileName", file.name);

            // BUG FIX: `token` is a Ref — concatenating the ref itself produced
            // "Bearer [object Object]". Unwrap with .value.
            const config = {headers: {"Authorization": "Bearer " + token.value}};
            const response = await this.http.post(action, formData, config);
            if (chunkIndex + 1 === chunks) {
                return response;
            }
        }
    }

    /**
     * Computes the MD5 of the whole file and, additionally, the MD5 of each
     * MAXSIZE-byte slice (read sequentially with a single FileReader).
     *
     * @param file blob to hash
     * @returns whole-file md5 plus one {currentChunk, md5} entry per slice
     *          (currentChunk is 1-based)
     * @throws rejects if the FileReader fails on any slice
     */
    fileMd5(file: Blob): Promise<{
        md5: string,
        sliceMd5s: {
            currentChunk: number,
            md5: string,
        }[]
    }> {
        return new Promise((resolve, reject) => {
            const sliceMd5s: {currentChunk: number, md5: string}[] = [];
            const fileReader = new FileReader();
            const chunks = Math.ceil(file.size / MAXSIZE);
            let currentChunk = 0;

            // allSpark accumulates every slice for the whole-file digest;
            // a fresh instance per slice yields the per-slice digest.
            const allSpark = new SparkMD5.ArrayBuffer();

            fileReader.onload = (e) => {
                const buffer = e.target?.result as ArrayBuffer;
                allSpark.append(buffer);
                const sliceSpark = new SparkMD5.ArrayBuffer();
                sliceSpark.append(buffer);
                currentChunk++;
                sliceMd5s.push({
                    currentChunk: currentChunk,
                    md5: sliceSpark.end(),
                });
                if (currentChunk < chunks) {
                    loadNext();
                } else {
                    resolve({
                        md5: allSpark.end(),
                        sliceMd5s: sliceMd5s,
                    });
                }
            };
            // BUG FIX: the original only logged here, so the promise never
            // settled on a read error and callers hung forever.
            fileReader.onerror = () => {
                reject(new Error("FileReader error while hashing file slice."));
            };

            // Reads the next MAXSIZE-byte slice; onload advances the state machine.
            function loadNext() {
                const start = currentChunk * MAXSIZE;
                const end = Math.min(start + MAXSIZE, file.size);
                fileReader.readAsArrayBuffer(file.slice(start, end));
            }

            loadNext();
        });
    }

}

