createChunk.js
使用 spark-md5 计算文件各分片的 MD5,生成文件指纹。
它可以帮助我们更加方便地进行文件哈希计算和文件完整性检测等操作。
import sparkMd5 from './sparkmd5.js'export function createChunk(file, index, chunkSize) {return new Promise((resolve, reject) => {const start = index * chunkSize;const end = start + chunkSize;const spark = new sparkMd5.ArrayBuffer();const fileReader = new FileReader();const blob = file.slice(start, end);fileReader.onload = (e) => {spark.append(e.target.result);//耗时阻塞resolve({start, end, index, hash: spark.end(), blob});};fileReader.readAsArrayBuffer(blob);})}
cutFile.js
import {createChunk} from './createChunk.js'const CHUNK_SIZE = 1024 * 1024 * 5;export async function cutFile(file) {const result = [];/* 普通写法 速度慢 md5阻塞*/// const chunkCount = Math.ceil(file.size / CHUNK_SIZE);// for (let i = 0; i < chunkCount; i++) {// const chunk = await createChunk(file, i, CHUNK_SIZE);// result.push(chunk)// }new Promise((resolve, reject) => {const finishCount = 0;//完成数量const chunkCount = Math.ceil(file.size / CHUNK_SIZE);/* 多线程写法 */const THREAD_COUNT = navigator.hardwareConcurrency || 4;//获取线程数const threadChunkCount = Math.ceil(chunkCount / THREAD_COUNT)//分发计算每个线程的分片数量for (let i = 0; i < threadChunkCount; i++) {//创建多线程,并分配任务const worker = new Worker('./worker.js', { type: 'module' });let end = (i + 1) * threadChunkCount;const start = i * threadChunkCount;if (end > chunkCount) {end = chunkCount}worker.postMessage({file,CHUNK_SIZE,startChunkIndex: start,endChunkIndex: end});worker.onmessage = e => {for (let i = start; i < end; i++) {result[i] = e.data[i - start]//分片结果以次放到对应下标}if (finishCount === THREAD_COUNT) {resolve(result)}}}})}
worker.js 线程文件
onmessage = async (e) => {const {file,CHUNK_SIZE,startChunkIndex: start,endChunkIndex: end,} =e.data;const proms=[];for(let i=start;i<end;i++){proms.push(createChunk(file,i,CHUNK_SIZE));}const t= await Promise.all(proms);//等待分片完成postMessage(t)//分片结果传递给主线程};
应用
//oncheange 上传文件的按钮import {cutFile} from './cutFile.js'async function oncheange(e) {const file = e.target.files[0];const chunks = await cutFile(file)}