import SparkMD5 from 'spark-md5'
import Sha1 from 'sha-1'
import Axios from '../config/axios'
import store from '../store/index'
import { PROGRESS } from '../store/mutation-types.js'

// import Axios from 'axios'
/**
 * Chunked-file-uploader constructor.
 *
 * The constructor itself is intentionally empty: all configuration
 * (`opt`), per-upload state (`fileObj`, `uploadParam`) and behavior
 * are attached to the prototype below.
 */
function uploader() {
}

// uploader.prototype.onProgress= function(es){
//
// }
uploader.prototype.index = 0;

// Per-upload bookkeeping. NOTE(review): this object lives on the prototype,
// so ALL uploader instances share (and mutate) the same state — verify this
// is intentional before creating more than one uploader.
uploader.prototype.fileObj= {
    upLoadSize : 0,   // presumably bytes uploaded so far — not updated in visible code
    filePath:'/bai',  // path sent to the server in getUploadFile's params
    percentage:0,     // progress percentage — not updated in visible code
    chunks:[],        // one entry per uploaded chunk (formData, uploadTimes, success, ...); see uploadFile
    file:'',          // currently selected file — not assigned in visible code
};
// Uploader configuration. Every default below is commented out, so `opt`
// starts empty; code in this file reads opt.getUploadedFile, opt.webSocketUrl,
// opt.uploadUrl, opt.mergeUrl and opt.retry, which must therefore be assigned
// by the caller before an upload is started.
uploader.prototype.opt = {
    // blodSize: 1024 * 1024 * 10, // chunk size in bytes
    // method: 'POST', // HTTP request method
    // fileObjName: 'file', // form-field name the backend reads the file from (unnecessary when the backend uses Spring MVC)
    // uploadUrl: '/upload', // chunk-upload endpoint
    // mergeUrl: '/merge', // chunk-merge endpoint
    // getUploadedFile:'/seckill', // endpoint that checks whether the file was already uploaded
    // getUploadedSizeUrl: 'http://localhost:8080/file/uploadSize', // endpoint returning bytes already uploaded
    // getUploadFileInfoUrl: 'http://localhost:8080/file/getUploadFileInfo',
    // getRealFileNameUrl: 'http://localhost:8080/file/getRealFileName',
    // delTempFileUrl: 'http://localhost:8080/file/delTempFile',
    // hash: "",
    // retry:3 // max upload attempts per chunk (read by uploadAgain)
};
// Fields sent as multipart form data with every chunk-upload request
// (populated by uploadFile/loadNext before each request).
uploader.prototype.uploadParam = {
    uploadFileId:'',     // server-assigned id for this upload session
    blockMD5:'',         // MD5 of the current chunk's bytes
    blockHash:'',        // SHA-1 of the current chunk — currently unused (assignment is commented out)
    blockSize:1024*1024, // chunk size: 1 MiB
    chunks:'',           // total number of chunks for the file
    chunk:'',            // 1-based index of the current chunk
    start:'',            // byte offset where the current chunk starts
    end:'',              // byte offset where the current chunk ends (exclusive)
    file:''              // Blob slice holding the current chunk's data
};

/**
 * Change handler for an <input type="file"> element: kicks off a
 * server-side existence check (and possibly an upload) for every
 * selected file.
 *
 * @param {Event} e  change event whose target carries a FileList
 * @param {*}    id  unused; kept so existing callers keep working
 */
uploader.prototype.onChange=function(e,id){
    const selected = [];
    const files = e.target.files;
    for (let index = 0; index < files.length; index++) {
        const current = files[index];
        selected.push({'i': index, 'file': current});
        // Note: the same (still growing) array is handed to every call.
        this.getUploadFile(current, selected, this.fileObj);
    }
}


//检测是否上传过
/**
 * Asks the server whether `file` was already uploaded (de-duplication by
 * MD5 + SHA-1 of the full file contents). When the server reports the file
 * does not exist yet, a chunked upload is started via uploadFile().
 *
 * @param {File}   file     one file selected by the user
 * @param {Array}  fileArry entries of shape {i, file} for all selected files
 * @param {Object} fileObj  shared bookkeeping object (this.fileObj)
 */
uploader.prototype.getUploadFile = function(file,fileArry,fileObj){
    let fileReader = new FileReader();
    fileReader.readAsArrayBuffer(file);
    let that = this;
    // Recover this file's index among the selected files.
    let id = '';
    for(let j in fileArry){
        if(fileArry[j].file === file){
            id = fileArry[j].i;
        }
    }
    fileReader.onload = (e => {
        // Some browsers report onload even when the file could not be read
        // to the end; verify the byte count before hashing.
        if (file.size != e.target.result.byteLength) {
            return
        }
        let md5 = SparkMD5.ArrayBuffer.hash(e.target.result);
        let sha1 = Sha1(e.target.result);
        let params = {
            // BUG FIX: previously read fileObj.name / fileObj.size, but the
            // shared fileObj has no such properties (they were undefined).
            // The File object itself carries the name and size.
            fileName:file.name,
            fileSize:file.size,
            filePath :fileObj.filePath,
            md5:md5,
            hash:sha1,
            id:id
        };
        Axios({
            method: 'get',
            url: this.opt.getUploadedFile,
            baseURL:"/file",
            params:params,
            timeout: 90000,
        }).then(function (response){
            if(response.data.code == '200'){
                if(!response.data.data.exist){
                    // BUG FIX: the old code looked the file up via
                    // response.config.data.get('id'), but a GET request has
                    // no FormData body, so config.data is undefined and the
                    // lookup always threw. The closure already holds the
                    // right file.
                    that.uploadFile(file, response.data.data.uploadFileId, that);
                }
            }
        }).catch(function(err){
            // Was silently swallowed; at least surface the failure.
            console.error('getUploadedFile request failed', err);
        })
    });

    fileReader.onerror = function () {
        console.warn('FileReader failed while hashing file', file.name);
    };

}

/**
 * Uploads `file` in blockSize-byte chunks. Each chunk is hashed (MD5),
 * POSTed to opt.uploadUrl, and recorded in fileObj.chunks so that failed
 * chunks can be retried (uploadAgain) and a merge can be requested once
 * every chunk has been attempted (beforeMerge). Progress events arrive
 * over a WebSocket at opt.webSocketUrl.
 *
 * @param {File}     file file to upload
 * @param {string}   id   server-assigned upload-session id
 * @param {uploader} that the uploader instance (callbacks can't rely on `this`)
 */
uploader.prototype.uploadFile =function(file,id,that){
    that.uploadParam.uploadFileId = id;
    let ws = new WebSocket(that.opt.webSocketUrl);
    ws.onmessage = function(e){
        let progress = e.data;
        // NOTE(review): .call(progress) invokes onProgress with the progress
        // payload as `this` and no arguments. Existing handlers may depend on
        // that, so it is preserved — confirm before changing it to a normal
        // argument.
        that.onProgress.call(progress);
    }
    let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    let chunks = Math.ceil(file.size / that.uploadParam.blockSize);
    that.uploadParam.chunks = chunks;
    let currentChunk = 0;
    let fileReader = new FileReader();

    // True once every recorded chunk has been attempted at least once.
    // (Replaces a for...in loop whose `i+1 == length` check did string
    // concatenation on the string keys and never worked as intended.)
    function allChunksAttempted() {
        for (let i = 0; i < that.fileObj.chunks.length; i++) {
            if (that.fileObj.chunks[i].uploadTimes < 0) {
                return false;
            }
        }
        return true;
    }

    fileReader.onload = function (e) {
        that.uploadParam.blockMD5 = SparkMD5.ArrayBuffer.hash(e.target.result);
        currentChunk++;
        that.uploadParam.chunk = currentChunk; // 1-based chunk index
        let formData = new FormData();
        formData.append('uploadFileId',that.uploadParam.uploadFileId);
        formData.append('blockMD5',that.uploadParam.blockMD5);
        formData.append('blockSize',that.uploadParam.blockSize);
        formData.append('chunks',that.uploadParam.chunks);
        formData.append('chunk',that.uploadParam.chunk);
        formData.append('start',that.uploadParam.start);
        formData.append('end',that.uploadParam.end);
        formData.append('file',that.uploadParam.file);
        // Keep the form data so a failed chunk can be re-sent by uploadAgain;
        // uploadTimes == -1 means "not yet attempted".
        that.fileObj.chunks.push({'uploadFileId':that.uploadParam.uploadFileId,'currentChunk':currentChunk,
            'formData':formData, 'uploadTimes':-1,'success':false,'progress':0,'complete':''});
        Axios({
            method: 'post',
            url: that.opt.uploadUrl,
            baseURL:"/chunks",
            data:formData,
            headers: {'Content-Type': 'multipart/form-data'},
            timeout: 90000,
        }).then(function (response){
            if(response.data.code == '200'){
                // For POST requests Axios keeps the FormData on config.data,
                // so the chunk index can be recovered per-response.
                let thisChunk = response.config.data.get('chunk');
                that.fileObj.chunks[thisChunk-1].success = response.data.data;
                that.fileObj.chunks[thisChunk-1].uploadTimes++;
                if(allChunksAttempted()){
                    // BUG FIX: was `this.beforeMerge()` — inside this callback
                    // `this` is not the uploader, so the call always threw and
                    // the merge was never triggered from the success path.
                    that.beforeMerge(that);
                }
            }
        }).catch(function(err){
            console.error('chunk upload failed', err);
            if(allChunksAttempted()){
                that.beforeMerge(that);
            }
        })
        if (currentChunk < chunks) {
            loadNext();
        }

    };
    fileReader.onerror = function () {
        console.warn('oops, something went wrong.');
    };
    // Slices the next blockSize bytes out of the file and hands them to the
    // FileReader; reading triggers fileReader.onload, which uploads them.
    function loadNext() {
        let start = currentChunk * that.uploadParam.blockSize;
        // BUG FIX: the end offset used a hard-coded 1024*1024 instead of
        // blockSize, which silently corrupted uploads whenever blockSize
        // was changed from its 1 MiB default.
        let end = ((start + that.uploadParam.blockSize) >= file.size) ? file.size : start + that.uploadParam.blockSize;
        that.uploadParam.start = start;
        that.uploadParam.end = end;
        that.uploadParam.file =  file.slice(start,end);
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
}

/**
 * Decides the next step once chunks have been attempted: retries the first
 * failed chunk, or asks the server to merge when every chunk succeeded.
 * uploadAgain re-enters this method after each retry settles, so remaining
 * failures get handled one at a time.
 *
 * @param {uploader} that the uploader instance holding fileObj.chunks
 */
uploader.prototype.beforeMerge = function(that){
    let i;
    for(i= 0; i < that.fileObj.chunks.length; i++){
        if(!that.fileObj.chunks[i].success){
            that.uploadAgain(that.fileObj.chunks[i],that);
            break;
        }
    }

    // i == length means the loop found no failed chunk.
    // BUG FIX: the old condition required chunks.length > 1, so a file small
    // enough to fit in a single chunk was never merged. (A leftover
    // `debugger` statement was also removed.)
    if(that.fileObj.chunks.length == i && that.fileObj.chunks.length >= 1){
        that.merge(that);
    }

}

/**
 * Re-sends one failed chunk, recursing until it succeeds or opt.retry
 * attempts have been used up. On success the merge decision is re-run.
 *
 * @param {Object}   chunkInfo entry from fileObj.chunks (holds the FormData)
 * @param {uploader} that      the uploader instance
 */
uploader.prototype.uploadAgain = function (chunkInfo,that){
    Axios({
        method: 'post',
        url: that.opt.uploadUrl,
        baseURL:"/chunks",
        data:chunkInfo.formData,
        headers: {'Content-Type': 'multipart/form-data'},
        timeout: 90000,
    }).then(function (response){
        console.log(response);
        if(response.data.code == '200'){
            // BUG FIX: this line read `uploader.fileObj.chunks[...]`, but
            // fileObj lives on uploader.prototype, not on the constructor
            // function — the access threw on every successful retry.
            that.fileObj.chunks[chunkInfo.currentChunk-1].success = response.data.data;
            that.fileObj.chunks[chunkInfo.currentChunk-1].uploadTimes++;
            if(!response.data.data){
                if( that.fileObj.chunks[chunkInfo.currentChunk-1].uploadTimes >=  that.opt.retry){
                    console.log('文件单片上传尝试多次失败，文件上传失败');
                    return false;
                }else{
                    that.uploadAgain(that.fileObj.chunks[chunkInfo.currentChunk-1],that);
                }
            }else{
                console.log('文件单片上传尝试多次成功');
                that.beforeMerge(that);
                return true;
            }
        }
    }).catch(function(err){
        that.fileObj.chunks[chunkInfo.currentChunk-1].success = false;
        that.fileObj.chunks[chunkInfo.currentChunk-1].uploadTimes++;
        if( that.fileObj.chunks[chunkInfo.currentChunk-1].uploadTimes >=  that.opt.retry){
            // Was silently swallowed; surface the final failure.
            console.error('chunk retry limit reached', err);
            return false;
        }else{
            that.uploadAgain(that.fileObj.chunks[chunkInfo.currentChunk-1],that);
        }
    })
}

/**
 * Asks the backend to merge all uploaded chunks into the final file, then
 * clears the local chunk bookkeeping regardless of the outcome.
 *
 * @param {uploader} that the uploader instance
 */
uploader.prototype.merge = function(that){
    const request = {
        method: 'post',
        url: that.opt.mergeUrl,
        baseURL:"/chunks",
        data: { fileId: that.uploadParam.uploadFileId },
        timeout: 90000,
    };
    const reset = function(){ that.fileObj.chunks=[]; };
    Axios(request)
        .then(function (response){
            console.log(response)
            reset();
        })
        .catch(function(err){
            reset();
        });
}

export default uploader;