import SparkMD5 from 'spark-md5';
import {
	chunkSize
} from './config';

export default function(file, options) {
	// Compute the MD5 of a (potentially large) file incrementally,
	// chunk by chunk, so the whole file never has to sit in memory.
	//
	// `file`    - uploader file wrapper; `file.file` is the underlying Blob,
	//             `file.size`/`file.name` mirror the Blob, and `file.cancel()`
	//             aborts the upload (presumably provided by the uploader lib —
	//             TODO confirm against the caller).
	// `options` - optional callbacks:
	//             init()          called once before hashing starts
	//             progress(pct)   percentage string ("0".."100") per chunk
	//             finish(md5)     called with the final hex digest
	//             error(msg)      called if a chunk read fails
	let currentChunk = 0;
	const fileReader = new FileReader();
	const spark = new SparkMD5.ArrayBuffer();
	const chunks = Math.ceil(file.size / chunkSize);
	// Vendor-prefixed fallbacks for older browsers.
	const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;

	if (typeof options.init === 'function') {
		options.init();
	}

	fileReader.onload = e => {
		spark.append(e.target.result);
		// Increment BEFORE the bounds check; the original checked first and
		// therefore issued one extra read of an empty slice past EOF before
		// finishing.
		currentChunk++;
		if (currentChunk < chunks) {
			// Report hashing progress as an integer percentage string.
			if (typeof options.progress === 'function') {
				options.progress(((currentChunk / chunks) * 100).toFixed(0));
			}
			loadNext();
		} else {
			// Last chunk appended — report completion, then emit the digest.
			if (typeof options.progress === 'function') {
				options.progress('100');
			}
			const md5 = spark.end();
			if (typeof options.finish === 'function') {
				options.finish(md5);
			}
		}
	};
	fileReader.onerror = function() {
		// BUG FIX: the original called `this.error(...)`, but FileReader has
		// no `error` method (only an `error` property), so a read failure
		// threw a TypeError instead of being reported. Route the message
		// through the options.error callback like the other callbacks.
		if (typeof options.error === 'function') {
			options.error(`文件${file.name}读取出错，请检查该文件`);
		}
		file.cancel();
	};

	// Start reading only after both handlers are attached.
	loadNext();

	// Read the next chunk [start, end) of the underlying Blob; the result
	// arrives asynchronously in fileReader.onload.
	function loadNext() {
		const start = currentChunk * chunkSize;
		const end = Math.min(start + chunkSize, file.size);
		fileReader.readAsArrayBuffer(blobSlice.call(file.file, start, end));
	}
}
