import SparkMD5 from "spark-md5";

/**
 * Read a File/Blob and resolve with its contents.
 *
 * @param {Blob} file - The file (or blob) to read.
 * @param {string} [type="base64"] - "base64" resolves with a data URL
 *   (via readAsDataURL); "buffer" resolves with an ArrayBuffer
 *   (via readAsArrayBuffer).
 * @returns {Promise<string|ArrayBuffer>} Resolves with the file contents.
 *   Rejects on a read error or an unsupported `type` (previously both
 *   cases left the promise pending forever).
 */
export function fileParse(file, type = "base64") {
    return new Promise((resolve, reject) => {
        const fileRead = new FileReader();

        // Attach handlers BEFORE starting the read so no event can be missed.
        fileRead.onload = (ev) => resolve(ev.target.result);
        fileRead.onerror = () =>
            reject(fileRead.error ?? new Error("fileParse: file read failed"));

        if (type === "base64") {
            fileRead.readAsDataURL(file);
        } else if (type === "buffer") {
            fileRead.readAsArrayBuffer(file);
        } else {
            // Unknown type: fail fast instead of never settling.
            reject(new Error(`fileParse: unsupported type "${type}"`));
        }
    });
}

/**
 * Compute the MD5 hash of a file incrementally, reading it chunk by chunk
 * so large files are never fully resident in memory.
 *
 * @param {File} file - The file to hash.
 * @param {number} [DefualtChunkSize=2097152] - Chunk size in bytes
 *   (2 MiB default; previously omitting it made `chunks` NaN and silently
 *   produced the hash of an empty slice).
 * @returns {Promise<string>} Resolves with the hex MD5 digest. Rejects on a
 *   read error (previously errors were only logged and the promise hung).
 */
export function getFileMd5(file, DefualtChunkSize = 2 * 1024 * 1024) {
    return new Promise((resolve, reject) => {
        // Vendor-prefixed fallbacks for older browsers.
        const blobSlice =
            File.prototype.slice ||
            File.prototype.mozSlice ||
            File.prototype.webkitSlice;
        const chunks = Math.ceil(file.size / DefualtChunkSize);
        const spark = new SparkMD5.ArrayBuffer();
        const fileReader = new FileReader();
        let currentChunk = 0;

        fileReader.onload = (e) => {
            spark.append(e.target.result);
            currentChunk++;
            if (currentChunk < chunks) {
                loadNext();
            } else {
                // All chunks consumed; finalize and emit the digest.
                resolve(spark.end());
            }
        };

        fileReader.onerror = () => {
            // Surface the failure to the caller instead of swallowing it.
            reject(fileReader.error ?? new Error("getFileMd5: file read failed"));
        };

        // Slice the next chunk and start an async read; onload continues the loop.
        function loadNext() {
            const start = currentChunk * DefualtChunkSize;
            const end = Math.min(start + DefualtChunkSize, file.size);
            fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
        }

        loadNext();
    });
}