- <!DOCTYPE html>
- <html lang="en">
- <head>
- <meta charset="UTF-8">
- <meta http-equiv="X-UA-Compatible" content="IE=edge">
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
- <title>BigFileUpload</title>
- </head>
- <body>
- <div class="upload-box">
- <h2>big file upload</h2>
- <div class="get-file">
- <input type="file" name="file" id="file">
- </div>
- <div class="upload-file">
- <button id="upload">上传文件</button>
- </div>
- </div>
- <script crossorigin="anonymous" integrity="sha512-bZS47S7sPOxkjU/4Bt0zrhEtWx0y0CRkhEp8IckzK+ltifIIE9EMIMTuT/mEzoIMewUINruDBIR/jJnbguonqQ==" src="https://lib.baomitu.com/axios/0.21.1/axios.min.js"></script>
- <script crossorigin="anonymous" integrity="sha384-45XT1VzQggQADTAenPH2Ecf0gLIwfiZ1J+nlE27AA9qXjtUXaplXshIamSqaco/e" src="https://lib.baomitu.com/spark-md5/3.0.0/spark-md5.js"></script>
- <script type="module">
// DOM handles for the file picker and the upload trigger button.
const file = document.querySelector('#file')
const uploadBtn = document.querySelector('#upload')
// Size of each upload slice; also the unit SparkMD5 hashes incrementally.
const DEFAULT_CHUNK_SIZE = 20 * 1024 * 1024; // 20MB
// Fallback options merged with whatever the caller passes to FileUploader.
const DEFAULT_OPTIONS = {
  chunkSize: DEFAULT_CHUNK_SIZE,
};
// Server-issued upload session id; set by initFilePartUploadFunc and read
// by the part-upload and finish callbacks below.
let uploadId
class FileUploader {
  /**
   * @param {Object} options - uploader configuration:
   *   chunkSize {number} slice size in bytes (defaults to DEFAULT_OPTIONS.chunkSize),
   *   retryTimes {number} how many retry passes over failed parts,
   *   initFilePartUploadFunc {Function} async, starts an upload session,
   *   uploadPartFileFunc {Function} async (chunk, index), must THROW on failure,
   *   finishFilePartUploadFunc {Function} async (md5), completes the upload.
   */
  constructor(options) {
    // Spread-copy instead of Object.assign(DEFAULT_OPTIONS, options):
    // the original mutated the shared DEFAULT_OPTIONS object, so one
    // instance's options leaked into every instance created afterwards.
    this.fileUploaderClientOptions = { ...DEFAULT_OPTIONS, ...options };
  }

  /**
   * 将file对象进行切片, 然后根据切片计算md5.
   * Slices the file and incrementally feeds each slice to SparkMD5, so the
   * whole-file hash is computed without holding the file in memory at once.
   * @param {File} file - the file to upload
   * @returns {Promise<{md5: string, chunkList: Blob[]}>} md5 of the full
   *   file plus the ordered list of slices; rejects on FileReader error.
   */
  async getChunkListAndFileMd5(file) {
    return new Promise((resolve, reject) => {
      let currentChunk = 0;
      const chunkSize = this.fileUploaderClientOptions.chunkSize;
      const chunks = Math.ceil(file.size / chunkSize);
      const spark = new SparkMD5.ArrayBuffer();
      const fileReader = new FileReader();
      // Vendor-prefixed fallbacks for older browsers.
      const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
      const chunkList = [];
      fileReader.onload = (e) => {
        if (e.target.result instanceof ArrayBuffer) {
          spark.append(e.target.result);
        }
        currentChunk++;
        if (currentChunk < chunks) {
          loadNextChunk();
        } else {
          // All slices hashed; finalize the digest.
          resolve({ md5: spark.end(), chunkList });
        }
      };
      fileReader.onerror = (e) => {
        console.warn('read file error', e);
        reject(e);
      };
      // Slice [start, end) out of the file, remember it, and read it so the
      // onload handler above can hash it and schedule the next slice.
      function loadNextChunk() {
        const start = currentChunk * chunkSize;
        const end = Math.min(start + chunkSize, file.size);
        const chunk = blobSlice.call(file, start, end);
        chunkList.push(chunk);
        fileReader.readAsArrayBuffer(chunk);
      }
      loadNextChunk();
    });
  }

  /**
   * 上传文件方法, 当FileUploaderClient的配置项中传入了requestOptions才能使用.
   * Runs getChunkListAndFileMd5, then initFilePartUploadFunc, then uploads
   * every chunk via uploadPartFileFunc (retrying failed parts up to
   * retryTimes passes), and finally calls finishFilePartUploadFunc.
   * @param {File} file - the file to upload
   * @returns {Promise<*>} whatever finishFilePartUploadFunc resolves with
   * @throws {Error} if the options are incomplete, or if any chunk still
   *   fails after all retry passes.
   */
  async uploadFile(file) {
    const requestOptions = this.fileUploaderClientOptions;
    // Fail fast on a bad configuration before doing the expensive MD5 pass.
    if (
      requestOptions.retryTimes === undefined ||
      !requestOptions.initFilePartUploadFunc ||
      !requestOptions.uploadPartFileFunc ||
      !requestOptions.finishFilePartUploadFunc
    ) {
      throw new Error(
        'invalid request options, need retryTimes, initFilePartUploadFunc, uploadPartFileFunc and finishFilePartUploadFunc',
      );
    }
    const { md5, chunkList } = await this.getChunkListAndFileMd5(file);
    await requestOptions.initFilePartUploadFunc();
    // First pass: upload every chunk sequentially, collecting failures.
    let failedList = [];
    for (let index = 0; index < chunkList.length; index++) {
      try {
        await requestOptions.uploadPartFileFunc(chunkList[index], index);
      } catch (e) {
        console.warn(`${index} part upload failed`);
        failedList.push(index);
      }
    }
    // Retry passes. The original spliced retryList while iterating it by
    // index, which skipped the element after every success; building a fresh
    // list of still-failing parts per pass retries each failed chunk exactly
    // once per pass.
    for (let retry = 0; retry < requestOptions.retryTimes && failedList.length > 0; retry++) {
      console.log(`retry start, times: ${retry}`);
      const stillFailing = [];
      for (const blobIndex of failedList) {
        try {
          await requestOptions.uploadPartFileFunc(chunkList[blobIndex], blobIndex);
        } catch (e) {
          console.warn(`${blobIndex} part retry upload failed, times: ${retry}`);
          stillFailing.push(blobIndex);
        }
      }
      failedList = stillFailing;
    }
    if (failedList.length > 0) {
      throw new Error(
        `upload failed, some chunks upload failed: ${JSON.stringify(failedList)}`,
      );
    }
    return await requestOptions.finishFilePartUploadFunc(md5);
  }
}
const fileUpload = new FileUploader({
  retryTimes: 2,
  // Step 1: ask the server for an uploadId for this file name and stash it
  // in the module-level `uploadId` for the later callbacks.
  initFilePartUploadFunc: async () => {
    const fileName = file.files[0].name
    const { data: { data } } = await axios.get('/api/upload-init', {
      params: {
        name: fileName,
      }
    })
    uploadId = data.uploadId
    console.log('初始化上传完成')
  },
  // Step 2: upload a single chunk. FileUploader only retries a part when
  // this callback throws, so a server-side rejection must become an Error.
  uploadPartFileFunc: async (chunk, index) => {
    const formData = new FormData()
    formData.append('uploadId', uploadId)
    formData.append('partIndex', index.toString())
    formData.append('partFile', chunk)
    const { data: { success, data } } = await axios.post('/api/upload-part', formData, {
      headers: { 'Content-Type': 'multipart/form-data' },
    })
    // The original silently ignored success === false, which made a
    // rejected part look uploaded and defeated the retry logic.
    if (!success) {
      throw new Error(`part ${index} upload rejected by server`)
    }
    console.log(data)
    console.log(`上传分片${data.index}完成`)
  },
  // Step 3: tell the server every part is in; it assembles the file and
  // verifies the whole-file md5, returning the stored path.
  finishFilePartUploadFunc: async (md5) => {
    const fileName = file.files[0].name
    const { data: { success, data } } = await axios.get('/api/upload-finish', {
      params: {
        name: fileName,
        uploadId,
        md5,
      }
    })
    if (success) {
      console.log(`上传完成,存储地址为:${data.path}`)
      const p = document.createElement('p')
      p.textContent = `文件地址:${data.path}`
      document.body.appendChild(p)
    }
  },
})
uploadBtn.addEventListener('click', async () => {
  // Guard: clicking before choosing a file would crash on file.files[0].
  if (file.files.length === 0) {
    console.warn('no file selected')
    return
  }
  try {
    await fileUpload.uploadFile(file.files[0])
  } catch (e) {
    // The original left uploadFile() as a floating promise, so failures
    // surfaced only as unhandled rejections.
    console.error('upload failed', e)
  }
})
- </script>
- </body>
- </html>
|