import SparkMD5 from 'spark-md5';
import {axiosFile} from '@/utils/request'

// Lifecycle callback hooks for an upload. This is the reference shape for the
// `callback` argument of `uploadFile`; every hook defaults to a no-op.
// Frozen so the shared template cannot be mutated by accident.
const CallbackOption = Object.freeze({
    // Called right before the init request is sent.
    beforeInitFile: function (file) {
    },
    // Called after the init response has been merged into the upload state.
    afterInitFile: function (file, fileState) {
    },
    // Called before each part is PUT to its pre-signed URL.
    beforeUploadPartList: function (file, fileState, index, chunkFile) {
    },
    // Called after each part upload resolves.
    afterUploadPartList: function (file, fileState, index, chunkFile) {
    },
    // Called before the complete endpoint is invoked with the per-part MD5s.
    beforeCompleteFile: function (file, fileState, partMd5List) {
    },
    // Called after the complete endpoint responds.
    afterCompleteFile: function (file, fileState, partMd5List) {
    },
    // Called once the whole upload is finished (including the already-stored fast path).
    fileUploadFinish: function (file, fileState) {
    }
});

// Default upload options — the reference shape for the `option` argument of
// `uploadFile`. Frozen template; the runtime copy lives in `_defaultOption`.
const Option = Object.freeze({
    authorization: '',                        // Authorization header value
    isPrivate: false,                         // whether the stored file is private
    concurrentLimit: 5,                       // max simultaneous part uploads
    downloadURL: '/storage/download/',
    initURL: '/storage/upload/init',
    shardURL: '/storage/upload/sharding',
    completeURL: '/storage/upload/complete/'  // fileKey is appended at call time
});

// Shape of a single part entry in `_state.partList` (returned by the init endpoint).
const FilePart = Object.freeze({
    startPosition: 0, // byte offset where the part starts (inclusive)
    endPosition: 0,   // byte offset where the part ends (exclusive)
    uploadId: '',
    url: ''           // pre-signed URL the part is PUT to
});

// Shape of the upload state accumulated in `_state`. The server's init
// response is merged on top of this (fileKey, partCount, isDone, ...).
const FileState = Object.freeze({
    partList: []
});

// Shape of the file descriptor kept in `_file` while an upload is running.
const FileType = Object.freeze({
    file: null,   // the original File/Blob
    fileMd5: '',  // MD5 of the whole file (hex string)
    fileSize: 0,  // size in bytes
    fileName: ''  // original file name
});

// Mutable module-level upload state, reset by `_initData` at the start of
// every `uploadFile` call.
// NOTE(review): because this state lives at module scope, two concurrent
// `uploadFile` calls would clobber each other — confirm callers only run
// one upload at a time.
let _state = {
    partList: []
};
// Lifecycle hooks supplied by the caller of `uploadFile` (may be undefined).
let _defaultCallbackOption;
// Effective options for the current upload; caller-supplied options are
// merged over these defaults in `uploadFile`.
let _defaultOption = {
    authorization: '',
    isPrivate: false,
    concurrentLimit: 5,
    downloadURL: '/storage/download/',
    initURL: '/storage/upload/init',
    shardURL: '/storage/upload/sharding',
    completeURL: '/storage/upload/complete/'
};
// Descriptor of the file currently being uploaded ({file, fileMd5, fileSize, fileName}).
let _file = {};

/**
 * Upload a file in parts with optional lifecycle callbacks.
 * @param file File/Blob to upload
 * @param option overrides for the default options (URLs, concurrency, privacy)
 * @param callback lifecycle hooks (see CallbackOption); optional
 * @returns {Promise<Object>} the final upload state
 */
async function uploadFile(file, option, callback) {
    // Reset module-level state so a previous upload cannot leak into this one.
    // (The original fired `_initData()` without awaiting it — a floating promise.)
    await _initData();
    _defaultOption = {..._defaultOption, ...option};
    // Default to an empty object so later `_defaultCallbackOption.x` checks
    // do not throw a TypeError when no callbacks were supplied.
    _defaultCallbackOption = callback || {};
    await _initFile(file);
    // An async function wraps its return value in a Promise automatically;
    // `Promise.resolve(_state)` was redundant.
    return _state;
}

/**
 * Reset all module-level upload state back to its pristine defaults so a
 * new upload starts from a clean slate.
 */
async function _initData() {
    _file = {};
    _state = {partList: []};
    _defaultCallbackOption = undefined;
    _defaultOption = {
        authorization: '',
        isPrivate: false,
        concurrentLimit: 5,
        downloadURL: '/storage/download/',
        initURL: '/storage/upload/init',
        shardURL: '/storage/upload/sharding',
        completeURL: '/storage/upload/complete/'
    };
}

/**
 * Compute the file's MD5, register the upload with the server, and — unless
 * the server reports the file as already stored — upload every part and
 * finalize the upload.
 * @param file File/Blob to upload
 * @throws rethrows any request failure unchanged (preserving the original
 *         error and its stack; the previous `throw new Error(err)` wrapper
 *         stringified the error and destroyed both)
 */
async function _initFile(file) {
    _file.file = file;
    _file.fileMd5 = await _getFileMd5(file);
    _file.fileSize = file.size;
    _file.fileName = file.name;

    if (_defaultCallbackOption && _defaultCallbackOption.beforeInitFile) {
        _defaultCallbackOption.beforeInitFile(_file);
    }
    const initResponse = await axiosFile({
        url: _defaultOption.initURL,
        method: 'POST',
        data: {
            fileMd5: _file.fileMd5,
            fullFileName: _file.fileName,
            fileSize: _file.fileSize,
            isPrivate: _defaultOption.isPrivate,
        },
    });

    // `initResponse` is already the resolved value; the original's second
    // `await initResponse` was a no-op.
    _state = {..._state, ...initResponse.data};
    if (_defaultCallbackOption && _defaultCallbackOption.afterInitFile) {
        _defaultCallbackOption.afterInitFile(_file, _state);
    }
    // Fast path: the server already has this file (matched by MD5) — done.
    if (initResponse.data.isDone) {
        if (_defaultCallbackOption && _defaultCallbackOption.fileUploadFinish) {
            _defaultCallbackOption.fileUploadFinish(_file, _state);
        }
        return;
    }
    await _controlConcurrentRequests();
    await _completeUpload();
}

/**
 * Upload all parts in `_state.partList` to their pre-signed URLs, with at
 * most `_defaultOption.concurrentLimit` requests in flight per batch.
 * Failures do not abort the batch (Promise.allSettled); each settled
 * outcome (state on success, error on failure) is collected and returned.
 * @returns {Promise<Array<Array>>} one array of outcomes per batch
 */
async function _controlConcurrentRequests() {
    // Bug fix: honor the configured limit instead of a hard-coded 5.
    const concurrentLimit = _defaultOption.concurrentLimit || 5;
    const results = [];

    for (let index = 0; index < _state.partList.length; index += concurrentLimit) {
        const currentBatch = _state.partList.slice(index, index + concurrentLimit);
        // async callbacks replace the original explicit `new Promise` wrapper.
        const requests = currentBatch.map(async (item, idx) => {
            const _chunkFile = _file.file.slice(item.startPosition, item.endPosition);
            if (_defaultCallbackOption && _defaultCallbackOption.beforeUploadPartList) {
                _defaultCallbackOption.beforeUploadPartList(_file, _state, index + idx, _chunkFile);
            }
            const response = await fetch(item.url, {
                method: 'PUT',
                body: _chunkFile,
            });
            // fetch only rejects on network errors; an HTTP error status must
            // be treated as a failed part as well.
            if (!response.ok) {
                throw new Error(`Part upload failed with HTTP ${response.status}`);
            }
            if (_defaultCallbackOption && _defaultCallbackOption.afterUploadPartList) {
                _defaultCallbackOption.afterUploadPartList(_file, _state, index + idx, _chunkFile);
            }
            return _state;
        });
        const responses = await Promise.allSettled(requests);
        results.push(responses.map(
            (response) => response.status === 'fulfilled' ? response.value : response.reason
        ));
    }
    return results;
}

/**
 * Finalize the upload: fetch the server's authoritative part layout,
 * compute an MD5 per part, and POST the list to the complete endpoint so
 * the server can verify and assemble the file.
 */
async function _completeUpload() {
    const shardResponse = await axiosFile({
        url: _defaultOption.shardURL,
        method: 'POST',
        data: {fileSize: _file.fileSize}
    });
    // `shardResponse` is already resolved; the original's second `await` was a no-op.
    const partMd5List = [];
    for (let i = 0; i < _state.partCount; i++) {
        const item = shardResponse.data.partList[i];
        const _chunkFile = _file.file.slice(item.startPosition, item.endPosition);
        partMd5List.push(await _getFileMd5(_chunkFile));
    }
    if (_defaultCallbackOption && _defaultCallbackOption.beforeCompleteFile) {
        _defaultCallbackOption.beforeCompleteFile(_file, _state, partMd5List);
    }
    await axiosFile({
        url: `${_defaultOption.completeURL}${_state.fileKey}`,
        method: 'POST',
        data: {partMd5List: partMd5List},
    });
    if (_defaultCallbackOption && _defaultCallbackOption.afterCompleteFile) {
        _defaultCallbackOption.afterCompleteFile(_file, _state, partMd5List);
    }
    if (_defaultCallbackOption && _defaultCallbackOption.fileUploadFinish) {
        _defaultCallbackOption.fileUploadFinish(_file, _state);
    }
}

/**
 * Compute the MD5 hash of a File/Blob, reading it in 2 MiB chunks so memory
 * use stays flat for large files.
 * @param file File/Blob to hash
 * @returns {Promise<string>} hex MD5 digest
 */
function _getFileMd5(file) {
    const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    const chunkSize = 2097152; // 2 MiB per read
    const chunks = Math.ceil(file.size / chunkSize);
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    let currentChunk = 0;

    function loadNext() {
        const start = currentChunk * chunkSize;
        const end = Math.min(start + chunkSize, file.size);
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }

    return new Promise((resolve, reject) => {
        fileReader.onload = (e) => {
            spark.append(e.target.result);
            currentChunk++;

            if (currentChunk < chunks) {
                loadNext();
            } else {
                resolve(spark.end(false));
            }
        };
        fileReader.onerror = (err) => {
            reject(err);
        };
        // Bug-hardening: start the first read only after the handlers are
        // attached. The original called loadNext() before the Promise
        // executor ran, relying on FileReader events firing asynchronously.
        loadNext();
    });
}

// Public API: only `uploadFile` is exposed; everything else in this module
// is private helper state and functions.
export default {
    uploadFile
};
