import SparkMD5 from "spark-md5"
import {type FileStatus, getUploadStatus, uploadLarge, uploadSmall} from "@/apis/oss";
import {useFileStore} from "@/stores/modules/file";
import eventBus from "@/stores/event";
import {Message} from "@arco-design/web-vue";

// Chunk size for large-file slicing: 2 MiB per chunk.
const FILE_MAX_SIZE = 1024 * 1024 * 2
// NOTE(review): these two incremental MD5 hashers are module-level singletons
// shared by every call in this file. `end()` resets them, so strictly
// sequential use works, but two overlapping hash computations would
// interleave their `append` calls and corrupt both digests — confirm that
// uploads are serialized before relying on this.
const sparkMD5 = new SparkMD5.ArrayBuffer();
const spark = new SparkMD5.ArrayBuffer();
// Pinia file store used to publish upload progress/state to the UI.
const fileStore = useFileStore()

/**
 * One slice of a large file, prepared for chunked upload.
 */
export interface uploadFileChunk {
    // Identifier for this chunk (currently derived from the creation time).
    id: string,
    // MD5 of the whole file; identical across all chunks of the same file.
    hash: string,
    // MD5 of this individual chunk.
    chunkHash:string,
    // The sliced binary data of this chunk.
    file: Blob,
    // Zero-based position of the chunk within the file, as a string.
    index: string,
    // Original file name; only set where the producer knows it.
    fileName?:string
}



/**
 * 切片文件
 * @param file
 */
/**
 * 切片文件 — split a blob into fixed-size chunks.
 *
 * @param file the blob to slice
 * @param chunkSize chunk size in bytes; defaults to FILE_MAX_SIZE (2 MiB),
 *                  preserving the previous hard-coded behavior
 * @returns the chunks in order; the last chunk may be smaller than
 *          chunkSize, and an empty blob yields an empty array
 */
const sliceFile = (file: Blob, chunkSize: number = FILE_MAX_SIZE): Blob[] => {

    const fileSlice: Blob[] = []

    for (let i = 0; i < file.size; i += chunkSize) {
        // Blob.slice clamps the end offset to the blob's size,
        // so no explicit bounds check is needed for the final chunk.
        fileSlice.push(file.slice(i, i + chunkSize))
    }

    return fileSlice;
}


/**
 * 计算小文件hash
 * @param file
 */
const parseFileMd5 = async (file:Blob) :Promise<string> => {

    const fileRead = new FileReader();
    let md5 = "";

    fileRead.readAsArrayBuffer(file)

    await new Promise(resolve => {
        fileRead.onload = (e) => {
            spark.append(e.target!.result as ArrayBuffer)

            md5 = spark.end();
            resolve(md5)
        }
    })


    return md5
}


/**
 * 计算大文件hash
 * @param file
 */
const parseBigFileMd5 = async (file:File) :Promise<uploadFileChunk[]> => {
    const blobs = sliceFile(file);

    const chunks :uploadFileChunk[] = []


    for (let i = 0; i < blobs.length; i++) {

        let chunkMd5 = await parseFileMd5(blobs[i]);

        let reader = new FileReader();

        reader.readAsArrayBuffer(blobs[i]);

        await new Promise(resolve => {
            reader.onload =  (e) => {
                sparkMD5.append(e.target!.result as ArrayBuffer)
                resolve("ok")
            }
        })


        chunks.push({
            id: new Date().toString(),
            hash: '',
            chunkHash: chunkMd5,
            file: blobs[i],
            index: i + "",
            fileName: file.name
        })

    }

    let fileHash = sparkMD5.end()

    for (let i = 0;i < chunks.length; i++) {

        chunks[i].hash = fileHash

    }

    return chunks;
}

/**
 * 上传文件
 * @param file
 * @param userId
 * @param subject
 */
/**
 * 上传文件 — upload a small file in a single multipart request.
 *
 * @param file the file to upload
 * @param userId owner of the upload
 * @param subject business subject/category the file belongs to
 * @returns the response resolved by uploadSmall
 */
export const uploadFile = async (file: File, userId: string, subject: string) => {
    const formData = new FormData();
    formData.append("file", file);
    formData.append("userId", userId);
    formData.append("subject", subject);
    // Await directly instead of the previous mixed await/.then pattern
    // that funneled the result through a mutable local.
    return await uploadSmall(formData)
}


/**
 * Hash a file, upload it chunk by chunk (resuming from whatever the server
 * already has), then request the server-side merge.
 *
 * The previous implementation re-scheduled itself with setTimeout, so the
 * promise returned to the caller resolved after the FIRST chunk, and the
 * fileLargeMerge result was never awaited. This version drives the whole
 * upload in an awaited loop (keeping the 1s pause between chunks) and
 * resolves with the merge response.
 *
 * @param file the file to upload
 * @param user uploading user id
 * @param subject business subject/category the file belongs to
 * @returns the merge response once every chunk has been uploaded
 */
export const uploadLargeFile = async (file: File, user: string, subject: string) => {
    Message.success("计算文件hash中")
    const chunks = await parseBigFileMd5(file);
    Message.success("文件hash计算完成")
    Message.success("开始上传")

    // NOTE(review): an empty file produces zero chunks and chunks[0] below
    // would be undefined — same as the original behavior; confirm callers
    // never pass empty files.
    let index = 0;

    while (true) {
        // Ask the server which chunk it expects next (resume support).
        index = await getFileStatus(chunks[index].hash, user, index) as number

        if (index >= chunks.length) {
            // Every chunk is on the server — request the merge.
            const response = await fileLargeMerge(user, chunks[0].hash, subject, file.name)
            fileStore.isHaveFileUpload = false;
            return response;
        }

        await fileLargeUpload(chunks[index].file, user, chunks[index].hash, chunks[index].chunkHash, chunks[index].index)

        // Publish progress (0..1, three decimals) to the UI store.
        fileStore.curFileStatus = (index / chunks.length).toFixed(3) as unknown as number

        // Throttle: pause one second between chunk uploads, as before.
        await new Promise(resolve => setTimeout(resolve, 1000))
    }
}

/**
 * Query the server for upload progress and decide which chunk to send next.
 *
 * @param hash whole-file MD5 identifying the upload
 * @param user uploading user id
 * @param index the chunk index the client believes it is on
 * @returns index of the next chunk to upload (0 for a brand-new upload)
 */
const getFileStatus = async (hash: string, user: string, index: number): Promise<number> => {
    const res = await getUploadStatus(hash, user)

    const status = res.data.data as FileStatus

    // No record on the server: start from the first chunk.
    if (status == null) {
        return 0
    }

    // Client and server disagree: resync to the server's current index.
    if (index !== status.currentIndex) {
        return status.currentIndex
    }

    // In sync: advance to whatever the server says comes next.
    // (The old code guarded this with a third `if` that was always true
    // here, leaving a dead `undefined` fall-through in the inferred
    // return type that forced the caller into an `as number` cast.)
    return status.nextIndex
}

/**
 * Send the final "merge" request that stitches the already-uploaded chunks
 * together on the server, then broadcast the result over the event bus and
 * reset the progress counter in the store.
 */
const fileLargeMerge = async (user: string, hash: string, subject: string, fileName: string) => {
    const form = new FormData();
    // The merge marker carries an empty payload plus the sentinel
    // chunkHash "end" that the backend recognizes as the merge signal.
    form.append("file", new Blob());
    form.append("userId", user);
    form.append("subject", subject);
    form.append("hash", hash);
    form.append("chunkHash", "end");
    form.append("index", "0");
    form.append("fileName", fileName)

    const result = await uploadLarge(form);

    Message.success("上传成功")
    eventBus.emit("uploadSuccess", result.data.data)
    fileStore.curFileStatus = 0.00
    return result
}

/**
 * Upload a single chunk of a large file.
 *
 * @param file the chunk's binary data
 * @param user uploading user id
 * @param hash whole-file MD5 (groups the chunks server-side)
 * @param chunkHash MD5 of this chunk
 * @param index zero-based chunk position, as a string
 * @returns true once the request has completed
 */
const fileLargeUpload = async (file: Blob, user: string, hash: string, chunkHash: string, index: string): Promise<boolean> => {

    const formData = new FormData();
    formData.append("file", file);
    formData.append("userId", user);
    formData.append("subject", "");
    formData.append("hash", hash);
    formData.append("chunkHash", chunkHash);
    formData.append("index", index);
    formData.append("fileName", '')

    // The response body is not inspected (the old code bound it to an
    // unused local); a resolved promise counts as success.
    await uploadLarge(formData)
    return true
}