import SparkMD5 from 'spark-md5'
import {getLargeFileInfo,upLargeFileData} from "apis/large-file-upload.js"

// Message payload posted back to the main thread; `type` selects STATUS vs PROGRESS.
const msgInfo = {type: null, uid: null, uploadStatus: 0, md5: '', progress: 0}
// Discriminator values for msgInfo.type.
const msgType = {STATUS: 1, PROGRESS: 2}
// Set when the main thread asks to cancel (cmd === 2); checked between work steps.
let isDelete = false
// Last status code sent through sendStatus (2 hashing, 3 hashed, 4 done, 5 error, 6 cancelled).
let workStatus = 0;
// Upload session state exchanged with the server (md5, totalSize, uploadedSize, paths, ...).
let largeFileUpload = {}
// let token = null
let localFile = null // the file to be uploaded
// let chunkFile

// Debug tap: log every inbound message (actual dispatch happens in self.onmessage below).
self.addEventListener('message', (evt) => {
    console.log('Received message:', evt.data);
});

/**
 * Command dispatcher for messages from the main thread.
 * cmd === 1: start an upload — payload carries {uid, file}; kicks off MD5 hashing.
 * cmd === 2: cancel the upload in progress.
 */
self.onmessage = function (e) {
    console.log("进入worker");
    if (e.data.cmd === 1) {
        // token = e.data.token
        // A cancel from a previous run must not abort this fresh upload.
        isDelete = false
        msgInfo.uid = e.data.uid
        // Start a clean session so stale fields (uploadedSize, uploadStatus, md5)
        // from a previous file don't leak into the new server handshake.
        // Server-side expects sizes as strings; keep JSON.stringify for compatibility.
        largeFileUpload = { totalSize: JSON.stringify(e.data.file.size) }
        localFile = e.data.file
        computeMD5(e.data.file)
    } else if (e.data.cmd === 2) {
        isDelete = true
        // Already finished (4) or failed (5): nothing is running, so
        // acknowledge the cancel immediately.
        if (workStatus === 4 || workStatus === 5) {
            sendStatus(6)
        }
    }
}

/**
 * Post a STATUS message to the main thread and remember it in workStatus.
 * @param {number} status status code (2 hashing, 3 hashed, 4 done, 5 error, 6 cancelled)
 * @param {string} [md5] file MD5 digest, when known
 * @param {string} [filePath] server-side path of the stored file, when known
 */
function sendStatus(status, md5 = '', filePath = '') {
    workStatus = status
    Object.assign(msgInfo, {
        type: msgType.STATUS,
        status,
        md5,
        filePath,
    })
    self.postMessage(msgInfo)
}

/**
 * Post a PROGRESS message (percentage, 0–100) to the main thread.
 * @param {number} progress percent complete
 */
function sendProgress(progress) {
    Object.assign(msgInfo, { type: msgType.PROGRESS, progress })
    self.postMessage(msgInfo)
}

/**
 * Resume-capable chunked upload.
 * Asks the server (keyed by MD5) how much is already stored, then streams
 * 10 MB slices of localFile until the server reports completion.
 * Emits PROGRESS per chunk; final STATUS 4 on success, 5 on failure,
 * 6 when a cancel request is observed between chunks.
 */
function uploadFile() {
    console.log("uploadFile")
    console.log("上传信息" + JSON.stringify(largeFileUpload))
    const params = { largeFileUpload: JSON.stringify(largeFileUpload) }

    getLargeFileInfo(params)
    .then(res => {
        console.log("反馈:" + JSON.stringify(res));
        largeFileUpload = res.data.largeFileUpload
        console.log("通过md5获取上传信息" + JSON.stringify(largeFileUpload));
        // Server already holds the whole file (instant "second upload").
        if (largeFileUpload.uploadStatus === "1"){
            sendStatus(4, largeFileUpload.md5, largeFileUpload.absolutePath)
            return
        }
        const chunkSize = 1024 * 1024 * 10 // bytes per upload chunk (10 MB)

        const loadNext = () => {
            // Honor a cancel request that arrived between chunks.
            if (isDelete) {
                sendStatus(6)
                return
            }
            // NOTE(review): resuming at uploadedSize - 1 re-sends one byte of
            // overlap — presumably the server's resume protocol; confirm server-side.
            let start = largeFileUpload.uploadedSize === '0' ? 0 : largeFileUpload.uploadedSize - 1,
                end = ((start + chunkSize) >= localFile.size - 1) ? localFile.size : start + chunkSize

            console.log("截取文件位置" + start + "," + end);

            const chunkFile = localFile.slice(start, end);

            console.log("发送数据" + JSON.stringify(largeFileUpload));
            console.log("截取文件块大小" + chunkFile.size);
            // The metadata part must travel as an application/json Blob alongside the chunk.
            const blob = new Blob([JSON.stringify(largeFileUpload)], {
                type: 'application/json',
            });
            const formData = new FormData();
            formData.append("chunkFile", chunkFile);
            formData.append("largeFileUpload", blob);

            upLargeFileData(formData)
            .then(res => {
                largeFileUpload = res.data.largeFileUpload;
                console.log("已上传长度:" + largeFileUpload.uploadedSize)
                sendProgress(Number((parseInt(largeFileUpload.uploadedSize, 10) * 100 / parseInt(largeFileUpload.totalSize, 10)).toFixed(2)))
                if (parseInt(largeFileUpload.uploadedSize, 10) === localFile.size || largeFileUpload.uploadStatus === '1') {
                    sendStatus(4, largeFileUpload.md5, largeFileUpload.filePath)
                    sendProgress(100)
                } else {
                    loadNext() // continue with the next chunk
                }
            }).catch(error => {
                // A failed chunk aborts the run; surface the failure to the UI
                // instead of silently stalling.
                console.log(error)
                sendStatus(5)
            })
        }
        loadNext();
    })
    .catch(error => {
        // Info request failed — we cannot resume without it; report failure.
        console.log(error)
        sendStatus(5)
    })
}

/**
 * Hash `file` with incremental MD5 (SparkMD5) in 10 MB slices, reporting
 * progress along the way, then hand off to uploadFile().
 * STATUS 2 while hashing, 3 once the digest is ready, 5 on read error,
 * 6 when a cancel request is observed between slices.
 * @param {File} file the file selected on the main thread
 */
function computeMD5(file) {
    console.log("进入worker:computeMD5");
    sendStatus(2)

    const sliceFn = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice
    const sliceSize = 1024 * 1024 * 10 // hash 10 MB per FileReader pass
    const totalSlices = Math.ceil(file.size / sliceSize)
    const hasher = new SparkMD5.ArrayBuffer()
    const reader = new FileReader()
    let sliceIndex = 0

    // Read the next slice; FileReader will fire onload when it is in memory.
    const readSlice = () => {
        const begin = sliceIndex * sliceSize
        const finish = (begin + sliceSize) >= file.size ? file.size : begin + sliceSize
        reader.readAsArrayBuffer(sliceFn.call(file, begin, finish))
    }

    reader.onload = (e) => {
        hasher.append(e.target.result)
        sliceIndex++
        if (sliceIndex >= totalSlices) {
            // All slices consumed: finalize the digest and start the upload.
            const md5 = hasher.end()
            largeFileUpload.md5 = md5
            hasher.destroy() // release internal buffers
            sendProgress(100)
            sendStatus(3, md5)
            uploadFile()
            return
        }
        sendProgress(Number((sliceIndex * sliceSize * 100 / file.size).toFixed(2)))
        if (isDelete) {
            hasher.destroy() // release internal buffers
            sendStatus(6)
        } else {
            readSlice()
        }
    }

    reader.onerror = () => {
        hasher.destroy() // release internal buffers
        sendStatus(5)
        console.warn('oops, something went wrong.');
    }

    readSlice()
}