If you have interviewed for front-end positions, you have probably been asked "how would you implement chunked upload for a large file", sometimes even as a live coding exercise where you are expected to build a working large-file upload demo by hand.
While tidying up a general-purpose video upload plugin I built for a past project, I ran into exactly this problem. This article extracts the core implementation ideas into a front-end and back-end demo of large file upload. The server side is a simple Node.js setup; feel free to copy it and run it yourself.
The front end initiates the upload request:
async function uploadFile(file) {
  const chunks = sliceFile(file, CHUNK_SIZE); // 1. Split the file into chunks
  const fileId = Date.now().toString(); // Generate a unique file ID
  const res = await checkUploadedChunks(fileId); // 2. Ask the server which chunks it already has
  const uploadedChunks = res.uploadedChunks;
  const fileName = file.name;
  for (let i = 0; i < chunks.length; i++) {
    // Resumable-upload logic: skip any chunk the server already has
    if (!uploadedChunks.includes(i)) {
      await uploadChunk(chunks[i], i, fileId); // 3. Upload this chunk
    }
    const progress = ((i + 1) / chunks.length) * 100; // Compute upload progress
    document.getElementById('progress').innerText = `Upload progress: ${progress.toFixed(2)}%`;
  }
  await mergeChunks(fileId, chunks.length, fileName); // 4. Ask the server to merge the uploaded chunks
  document.getElementById('progress').innerText = 'Upload complete';
}
/**
 * Split a file into chunks of the given size
 * @param {File} file Original file
 * @param {number} chunkSize Chunk size in bytes
 * @returns {Blob[]} Array of chunks
 */
function sliceFile(file, chunkSize) {
  const chunks = [];
  let start = 0;
  while (start < file.size) {
    const chunk = file.slice(start, start + chunkSize);
    chunks.push(chunk);
    start += chunkSize;
  }
  return chunks;
}
/**
 * Check which chunks have already been uploaded
 * @param {string} fileId Unique file identifier
 * @returns {Promise}
 */
async function checkUploadedChunks(fileId) {
  const response = await fetch(`http://localhost:3000/check-upload?fileId=${fileId}`);
  if (!response.ok) {
    throw new Error('Failed to check uploaded chunks');
  }
  return response.json();
}
/**
 * Upload a single file chunk
 * @param {Blob} chunk File chunk
 * @param {number} chunkIndex Chunk index
 * @param {string} fileId Unique file identifier
 * @returns {Promise}
 */
async function uploadChunk(chunk, chunkIndex, fileId) {
  const formData = new FormData();
  // Append the metadata fields before the file part so the server-side
  // multer callbacks can read them from req.body when the file arrives
  formData.append('chunkIndex', chunkIndex);
  formData.append('fileId', fileId);
  formData.append('file', chunk);
  const response = await fetch('http://localhost:3000/upload', {
    method: 'POST',
    body: formData,
  });
  if (!response.ok) {
    throw new Error('Chunk upload failed');
  }
  return response.json();
}
/**
 * Ask the server to merge the chunks
 * @param {string} fileId Unique file identifier
 * @param {number} totalChunks Total number of chunks
 * @param {string} fileName File name
 * @returns {Promise}
 */
async function mergeChunks(fileId, totalChunks, fileName) {
  const response = await fetch('http://localhost:3000/merge', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ fileId, totalChunks, fileName }),
  });
  if (!response.ok) {
    throw new Error('Merge failed');
  }
  return response.json();
}
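Two pieces are missing from the snippet above: CHUNK_SIZE is never defined, and nothing actually calls uploadFile. Below is a minimal wiring sketch; the 5 MB chunk size and the #fileInput element are assumptions made for this demo (only #progress appears in the code above), not part of the original plugin.

// Assumed markup: <input type="file" id="fileInput"> and <div id="progress"></div>
const CHUNK_SIZE = 5 * 1024 * 1024; // assumed: 5 MB per chunk, tune to your network and server limits

document.getElementById('fileInput').addEventListener('change', async (event) => {
  const file = event.target.files[0];
  if (!file) return;
  try {
    await uploadFile(file);
  } catch (err) {
    document.getElementById('progress').innerText = `Upload failed: ${err.message}`;
  }
});

One caveat about resuming: fileId is generated with Date.now(), so a retry after a failed upload gets a brand-new ID and /check-upload will never report the chunks that already reached the server. A content-derived ID makes resuming work across attempts. Here is a rough sketch (hashFileId is a hypothetical helper, not in the original code) using the Web Crypto API; it reads the whole file into memory, which is fine for a demo but not for multi-gigabyte files:

// Hypothetical helper: derive fileId from the file content so retries reuse the same ID.
// crypto.subtle requires a secure context (https or localhost).
async function hashFileId(file) {
  const buffer = await file.arrayBuffer();
  const digest = await crypto.subtle.digest('SHA-256', buffer);
  return Array.from(new Uint8Array(digest))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('');
}

With that in place, uploadFile could use const fileId = await hashFileId(file) instead of Date.now().toString().

The server side is a small Express app that receives the chunks with multer: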
const express = require('express')
const multer = require('multer')
const fs = require('fs')
const path = require('path')
const cors = require('cors')

const app = express()
const port = 3000

// Upload directory for chunks and merged files
const uploadDir = path.join(__dirname, 'uploads')
if (!fs.existsSync(uploadDir)) {
  fs.mkdirSync(uploadDir)
}
// Configure multer: store each chunk on disk as `${fileId}-${chunkIndex}`
const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, uploadDir)
  },
  filename: (req, file, cb) => {
    // These body fields are available here because the client appends them
    // before the file part in the FormData
    const fileId = req.body.fileId
    const chunkIndex = req.body.chunkIndex
    cb(null, `${fileId}-${chunkIndex}`)
  }
})
const upload = multer({ storage })
// Enable CORS so the demo page can call the API from another origin
app.use(cors())

// Receive a single chunk
app.post('/upload', upload.single('file'), (req, res) => {
  const fileId = req.body.fileId
  const chunkIndex = req.body.chunkIndex
  console.log(`Received chunk ${chunkIndex} for file ${fileId}`)
  // multipart/form-data is the content type of the *request*; the response is
  // plain JSON, so no Content-Type header needs to be set manually here
  res.json({ success: true, fileId, chunkIndex })
})
// Report which chunks of a file are already on disk
app.get('/check-upload', (req, res) => {
  const fileId = req.query.fileId
  fs.readdir(uploadDir, (err, files) => {
    if (err) {
      console.error('Error reading upload directory:', err)
      return res.status(500).json({ error: 'Internal server error' })
    }
    const uploadedChunks = files
      .filter(filename => filename.startsWith(`${fileId}-`))
      .map(filename => parseInt(filename.split('-')[1], 10))
    res.json({ uploadedChunks })
  })
})
// Merge the uploaded chunks into the final file
app.post('/merge', express.json(), (req, res) => {
  const fileId = req.body.fileId
  const totalChunks = req.body.totalChunks
  const fileName = req.body.fileName
  // NOTE: a real service should sanitise fileName to prevent path traversal
  const mergedFilePath = path.join(uploadDir, fileName)
  const writeStream = fs.createWriteStream(mergedFilePath)
  // Collect the chunk paths that actually exist on disk
  const chunks = []
  for (let i = 0; i < totalChunks; i++) {
    const chunkPath = path.join(uploadDir, `${fileId}-${i}`)
    if (fs.existsSync(chunkPath)) {
      chunks.push(chunkPath)
    }
  }
  // Defensive sort by chunk index, parsed from the file name rather than the full path
  chunks.sort((a, b) => {
    const aIndex = parseInt(path.basename(a).split('-')[1], 10)
    const bIndex = parseInt(path.basename(b).split('-')[1], 10)
    return aIndex - bIndex
  })
  // Append each chunk to the merged file, then delete the chunk file
  // (readFileSync keeps the demo simple; streaming would be gentler on memory)
  chunks.forEach(chunkPath => {
    const chunkData = fs.readFileSync(chunkPath)
    writeStream.write(chunkData)
    fs.unlinkSync(chunkPath)
  })
  // Respond only after the write stream has flushed everything to disk
  writeStream.end(() => {
    console.log(`File ${fileId} merged successfully`)
    res.json({ success: true, fileId })
  })
})
// Start the server
app.listen(port, () => {
  console.log(`Server is running on http://localhost:${port}`)
})
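To try the demo, install the three dependencies (npm install express multer cors), save the server code as, say, server.js, and start it with node server.js; it listens on http://localhost:3000 and writes chunks and the merged file into an uploads directory next to the script. If an upload is interrupted and retried with the same fileId, the chunks already on disk are skipped, which is exactly the resumable behaviour the /check-upload endpoint provides.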