Code repository address

What is breakpoint continuation?

With an ordinary file upload, if the network disconnects or some other problem interrupts the transfer, the server has to start again from scratch. For small files this is not a big deal, but for large files it wastes a lot of time and resources. The solution is the resumable upload ("breakpoint continuation"), which literally means continuing the transfer from the point where the upload broke off instead of starting over.

The principle

When uploading a file, the front end splits it into multiple chunks (Blob slices); after all chunks have been uploaded, the server merges them back into a single file. If the upload is interrupted partway through, the next time the same file is uploaded the client first asks the server for the index of the last chunk it already received, and then resumes uploading from that chunk onward.

The environment

The back-end

Formidable — file upload (multipart form) parsing module; Express — web framework

The front end

Axios — HTTP request library; spark-md5 — computes the file's MD5 hash

Create a project

Front-end code: index.html

view:

<!-- Upload widget: hidden file input over a dashed drop label, a progress
     bar (#big-current is the fill, driven by JS width updates), and the
     download link (#big-links) filled in after the server merges chunks. -->
<div class="upload">
  <h3>Large file upload</h3>
  <form>
    <div class="upload-file">
      <label for="file">Please select a file</label>
      <!-- input is absolutely positioned over the label with opacity 0 -->
      <input type="file" name="file" id="big-file" accept="application/*">
    </div>
    <div class="upload-progress">
      <p>
        <span style="width: 0;" id="big-current"></span>
      </p>
    </div>
    <div class="upload-link">
      <a href="javascript:;" id="big-links">Download link</a>
    </div>
  </form>
</div>

css:

/* Page reset and card background. */
body {
  margin: 0;
  font-size: 16px;
  background: #f8f8f8;
}

h1, h2, h3, h4, h5, h6, p {
  margin: 0;
}

/* The centered upload card. */
.upload {
  box-sizing: border-box;
  margin: 30px auto;
  padding: 15px 20px;
  width: 500px;
  height: auto;
  border-radius: 15px;
  background: #fff;
}

.upload h3 {
  font-size: 20px;
  line-height: 2;
  text-align: center;
}

/* Drop zone: the label is the visible dashed area, the real file input
   sits on top of it fully transparent so clicks land on the input. */
.upload .upload-file {
  position: relative;
  margin: 30px auto;
}

.upload .upload-file label {
  display: flex;
  justify-content: center;
  align-items: center;
  width: 100%;
  height: 150px;
  border: 1px dashed #ccc;
}

.upload .upload-file input {
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  opacity: 0;
}

/* Progress bar: grey track, gradient fill animated via its width. */
.upload-progress {
  display: flex;
  align-items: center;
}

.upload-progress p {
  position: relative;
  display: inline-block;
  flex: 1;
  height: 15px;
  border-radius: 10px;
  background: #ccc;
  overflow: hidden;
}

.upload-progress p span {
  position: absolute;
  left: 0;
  top: 0;
  width: 0;
  height: 100%;
  background: linear-gradient(to right bottom, rgb(163, 76, 76), rgb(231, 73, 52));
  transition: all .4s;
}

.upload-link {
  margin: 30px auto;
}

.upload-link a {
  text-decoration: none;
  color: rgb(6, 102, 192);
}

@media all and (max-width: 768px) {
  .upload {
    width: 300px;
  }
}

js:

<script src="https://cdn.bootcdn.net/ajax/libs/axios/0.21.1/axios.min.js"></script>
<script src="https://cdn.bootcdn.net/ajax/libs/spark-md5/3.0.0/spark-md5.min.js"></script>
<script>
  // Base URL of the upload server (the Express app in index.js listens on 3000).
  // FIX: the original referenced `baseUrl` without ever defining it.
  const baseUrl = 'http://localhost:3000';

  const bigFile = document.querySelector('#big-file');      // <input type="file">
  const bigCurrent = document.querySelector('#big-current'); // progress-bar fill
  const bigLinks = document.querySelector('#big-links');     // download link

  let fileArr = []; // Blob chunks of the currently selected file
  let md5Val = '';  // MD5 of the whole file; the server uses it as a directory name
  let ext = '';     // original file extension, needed by the merge step

  bigFile.addEventListener('change', (e) => {
    const file = e.target.files[0];
    const dotIndex = file.name.lastIndexOf('.');
    ext = file.name.substr(dotIndex + 1);
    if (file.type.indexOf('application') === -1) {
      return alert('The file must be a document (application/*)!');
    }
    if (file.size / (1000 * 1000) > 100) {
      return alert('The file must not be larger than 100MB!');
    }
    uploadBig(file);
  }, false);

  /**
   * Slice the file, compute its MD5, ask the server which chunks it already
   * has, then resume uploading from that point.
   */
  async function uploadBig(file) {
    let chunkIndex = 0;
    fileArr = sliceFile(file);
    md5Val = await md5File(fileArr);
    const data = await axios({
      url: `${baseUrl}/big?type=check&md5Val=${md5Val}&total=${fileArr.length}`,
      method: 'post',
    });
    if (data.data.code === 200) {
      // Resume from the last stored chunk; re-upload it in case it was
      // written only partially before the interruption.
      const chunk = data.data.data.data.chunk;
      chunkIndex = chunk.length ? chunk.length - 1 : 0;
      uploadSlice(chunkIndex);
    }
  }

  /** Split the file into 128KB Blob chunks. */
  function sliceFile(file) {
    const chunkSize = 128 * 1024;
    const chunks = [];
    for (let start = 0; start < file.size; start += chunkSize) {
      chunks.push(file.slice(start, Math.min(start + chunkSize, file.size)));
    }
    return chunks;
  }

  /**
   * Compute the MD5 of the whole file by feeding every chunk, in order,
   * into SparkMD5.
   * FIX: the original started all FileReaders in a loop but attached a
   * single `onload` to the last reader only, so the hash was computed from
   * one chunk and depended on read-completion timing. Chunks are now read
   * strictly one after another.
   */
  function md5File(files) {
    return new Promise((resolve) => {
      const spark = new SparkMD5.ArrayBuffer();
      let index = 0;
      const readNext = () => {
        const reader = new FileReader();
        reader.onload = (e) => {
          spark.append(e.target.result);
          index++;
          if (index < files.length) {
            readNext();
          } else {
            resolve(spark.end());
          }
        };
        reader.readAsArrayBuffer(files[index]);
      };
      readNext();
    });
  }

  /**
   * Upload chunk `chunkIndex`, update the progress bar, and recurse until
   * every chunk is on the server, then ask it to merge them.
   */
  async function uploadSlice(chunkIndex = 0) {
    const formData = new FormData();
    formData.append('file', fileArr[chunkIndex]);
    const data = await axios({
      url: `${baseUrl}/big?type=upload&current=${chunkIndex}&md5Val=${md5Val}&total=${fileArr.length}`,
      method: 'post',
      data: formData,
    });
    if (data.data.code === 200) {
      if (chunkIndex < fileArr.length - 1) {
        bigCurrent.style.width = Math.round((chunkIndex + 1) / fileArr.length * 100) + '%';
        uploadSlice(chunkIndex + 1);
      } else {
        mergeFile();
      }
    }
  }

  /** Ask the server to merge all uploaded chunks and show the download link. */
  async function mergeFile() {
    const data = await axios.post(
      `${baseUrl}/big?type=merge&md5Val=${md5Val}&total=${fileArr.length}&ext=${ext}`
    );
    if (data.data.code === 200) {
      alert('Upload succeeded!');
      bigCurrent.style.width = '100%';
      bigLinks.href = data.data.data.url;
    } else {
      alert(data.data.data.info);
    }
  }
</script>

Back-end code: index.js

const express = require('express'); const formidable = require('formidable'); const path = require('path'); const fs = require('fs'); const baseUrl = 'http://localhost:3000/file/doc/'; Const dirPath = path.join(__dirname, '/static/') const app = express() next) { res.header('Access-Control-Allow-Origin', '*') res.header('Access-Control-Allow-Headers', 'Content-Type') res.header('Access-Control-Allow-Methods', '*'); res.header('Content-Type', 'application/json; charset=utf-8') next(); }); app.post('/big', async function (req, res){ let type = req.query.type; let md5Val = req.query.md5Val; let total = req.query.total; let bigDir = dirPath + 'big/'; let typeArr = ['check', 'upload', 'merge']; if (! Type) {return res.json({code: 101, MSG: 'get_fail', data: {info: 'Upload type cannot be empty! ' } }) } if (! Md5Val) {return res.json({code: 101, MSG: 'get_fail', data: {info: 'MD5 value of file cannot be empty! ' } }) } if (! Total) {return res.json({code: 101, MSG: 'get_fail', data: {info: 'The number of file slices cannot be empty! ' } }) } if (! Typearr.includes (type)) {return res.json({code: 101, MSG: 'get_fail', data: {info: 'Upload type error! ' } }) } if (type === 'check') { let filePath = `${bigDir}${md5Val}`; fs.readdir(filePath, (err, data) => { if (err) { fs.mkdir(filePath, (err) => { if (err) { return res.json({ code: 101, MSG: 'get_fail', data: {info: 'get failed! ', err}})} else {return res.json({code: 200, MSG: 'get_succ', data: {info: 'Obtained successfully! ', data: { type: 'write', chunk: [], total: 0 } } }) } }) } else { return res.json({ code: 200, msg: 'get_succ', data: {info: 'Obtained successfully! ', data: { type: 'read', chunk: data, total: data.length } } }) } }); } else if (type === 'upload') { let current = req.query.current; if (! Json ({code: 101, MSG: 'get_fail', data: {info: 'File fragment value cannot be null! 
' } }) } let form = formidable({ multiples: true, uploadDir: `${dirPath}big/${md5Val}/`, }) form.parse(req, (err,fields, files)=> { if (err) { return res.json(err); } let newPath = `${dirPath}big/${md5Val}/${current}`; fs.rename(files.file.path, newPath, function(err) { if (err) { return res.json(err); } return res.json({ code: 200, msg: 'get_succ', data: { info: 'upload success! '}})})}); } else { let ext = req.query.ext; if (! Ext) {return res.json({code: 101, MSG: 'get_fail', data: {info: 'File suffix cannot be empty! ' } }) } let oldPath = `${dirPath}big/${md5Val}`; let newPath = `${dirPath}doc/${md5Val}.${ext}`; let data = await mergeFile(oldPath, newPath); If (data.code == 200) {return res.json({code: 200, MSG: 'get_succ', data: {info: 'File merge successfully! ${baseUrl}${md5Val}.${ext} '}})} else {return res.json({code: 101, MSG: 'get_fail', data: {info: 'File merge failed! ', err: Function mergeFile (filePath, newPath) {return new Promise((resolve, resolve)) reject) => { let files = fs.readdirSync(filePath), newFile = fs.createWriteStream(newPath); let filesArr = arrSort(files).reverse(); main(); function main (index = 0) { let currentFile = filePath + '/'+filesArr[index]; let stream = fs.createReadStream(currentFile); stream.pipe(newFile, {end: false}); stream.on('end', function () { if (index < filesArr.length - 1) { index++; main(index); } else { resolve({code: 200}); } }) stream.on('error', function (error) { reject({code: Function arrSort (arr) {for (let I = 0; i < arr.length; i++) { for (let j = 0; j < arr.length; j++) { if (Number(arr[i]) >= Number(arr[j])) { let t = arr[i]; arr[i] = arr[j]; arr[j] = t; } } } return arr; } app.listen(3000, ()=>{ console.log('http://localhost:3000/') })Copy the code