import SparkMD5 from "spark-md5";
// Default upload chunk size: 10 MB.
const CHUNK_SIZE = 10 * 1024 * 1024;
/**
 * Read the file behind an HTML5+ (plus.io) FileEntry as a data URL.
 *
 * Fixes over the previous version: the Promise actually settles
 * (resolve/reject were never called before), read/lookup errors are
 * propagated instead of being logged and swallowed, and the event
 * handlers are attached BEFORE the read starts so no event is missed.
 *
 * @param {Object} entry - a plus.io FileEntry; `entry.file()` asynchronously
 *   yields the underlying File object (HTML5+ runtime API).
 * @returns {Promise<string>} resolves with the file content as a data URL
 *   (NOTE(review): presumably the caller wants the read result — confirm
 *   against call sites), rejects on lookup or read failure.
 */
export async function cutFile(entry) {
	return new Promise((resolve, reject) => {
		entry.file(function(file) {
			const fileReader = new plus.io.FileReader();
			// Handlers must be registered before readAsDataURL so the
			// load/error events cannot fire unobserved.
			fileReader.onload = function(event) {
				resolve(event.target.result);
			};
			fileReader.onerror = function(event) {
				reject(event);
			};
			fileReader.readAsDataURL(file, 'utf-8');
		}, function(e) {
			// entry.file() failed to produce a File object.
			reject(e);
		});
	});
}

/**
 * Slice one chunk out of `file` and compute its MD5 hash.
 *
 * Fixes over the previous version:
 * - `index` was an undefined free variable (ReferenceError on every call);
 *   it is now a parameter defaulting to 0 (backward-compatible).
 * - Plain `new SparkMD5()` hashes *strings*; the reader produces an
 *   ArrayBuffer, so `SparkMD5.ArrayBuffer` is required for a correct digest.
 * - A failed read previously left the Promise pending forever; `onerror`
 *   now rejects.
 * - `end` is clamped to `file.size` so the last chunk's metadata matches
 *   the bytes actually sliced (Blob.slice clamps internally anyway).
 *
 * @param {File|Blob} file - source file to chunk.
 * @param {number} chunkSize - chunk size in bytes.
 * @param {number} [index=0] - zero-based chunk index to extract.
 * @returns {Promise<{start: number, end: number, index: number, hash: string, files: Blob}>}
 */
function createChunks(file, chunkSize, index = 0) {
	return new Promise((resolve, reject) => {
		try {
			// Byte range of this chunk within the file.
			const start = index * chunkSize;
			const end = Math.min(start + chunkSize, file.size);
			const blob = file.slice(start, end);
			const fileReader = new FileReader();
			// ArrayBuffer-aware incremental hasher.
			const spark = new SparkMD5.ArrayBuffer();
			// onload fires once the chunk has been fully read.
			fileReader.onload = (e) => {
				spark.append(e.target.result);
				resolve({
					start,
					end,
					index,
					hash: spark.end(),
					files: blob,
				});
			};
			// Without this a failed read would never settle the Promise.
			fileReader.onerror = () => reject(fileReader.error);
			fileReader.readAsArrayBuffer(blob);
		} catch (err) {
			reject(err);
		}
	});
}