前端大文件上传(Large File Upload)完整解决方案
前端大文件上传的实现方案,包括分片上传、断点续传、进度监控、错误处理等完整功能。
涉及技术:File API、Blob API、FileReader API、FormData API、XMLHttpRequest、Fetch API
参考文档:
- File API — 文件对象操作
- Blob API — 二进制大对象
- FileReader API — 文件读取
- FormData API — 表单数据和文件上传
📚 目录
1. 问题分析
1.1 大文件上传的挑战
传统上传方式的问题:
- 内存占用:大文件一次性加载到内存,可能导致浏览器崩溃
- 网络超时:上传时间过长,容易触发网络超时
- 上传失败:网络中断后需要重新上传整个文件
- 用户体验:无法显示上传进度,用户体验差
- 服务器压力:大文件上传对服务器造成压力
1.2 解决方案
分片上传(Chunk Upload):
- 将大文件分割成多个小片段
- 逐个上传文件片段
- 服务器合并文件片段
断点续传(Resume Upload):
- 记录已上传的片段
- 网络中断后从断点继续上传
- 避免重复上传
进度监控(Progress Monitoring):
- 实时显示上传进度
- 显示上传速度
- 预估剩余时间
2. 技术方案
2.1 技术栈
前端技术:
- File API:获取文件对象
- Blob API:文件切片
- FileReader API:读取文件内容(可选)
- XMLHttpRequest / Fetch API:发送请求
- Streams API:流式处理(可选)
后端技术:
- 接收文件片段
- 验证文件片段
- 合并文件片段
- 支持断点续传
2.2 实现流程
1. 选择文件
↓
2. 计算文件 Hash(MD5/SHA256)
↓
3. 检查服务器是否已有文件(秒传)
↓
4. 分割文件为多个片段
↓
5. 上传文件片段(并发/串行)
↓
6. 显示上传进度
↓
7. 所有片段上传完成后,通知服务器合并
↓
8. 上传完成
2.3 文件分片策略
分片大小:
- 推荐:2MB - 5MB
- 太小:请求次数过多,效率低
- 太大:网络中断后重传成本高
分片命名:
- 格式:{fileHash}-{chunkIndex}-{totalChunks}
- 示例:abc123-0-10、abc123-1-10
3. 核心实现
3.1 文件切片
/**
* 文件切片
* @param {File} file - 文件对象
* @param {number} chunkSize - 切片大小(字节)
* @returns {Array<Blob>} 切片数组
*/
function createFileChunks(file, chunkSize = 2 * 1024 * 1024) {
const chunks = [];
let start = 0;
while (start < file.size) {
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
chunks.push(chunk);
start = end;
}
return chunks;
}3.2 计算文件 Hash
/**
 * Compute an incremental MD5 hash of a file with SparkMD5,
 * reading 2MB at a time so memory usage stays flat.
 * @param {File} file - file to hash
 * @returns {Promise<string>} hex MD5 digest
 */
async function calculateFileHash(file) {
  return new Promise((resolve, reject) => {
    const CHUNK_SIZE = 2 * 1024 * 1024; // 2MB read window
    const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
    const hasher = new SparkMD5.ArrayBuffer();
    const reader = new FileReader();
    let chunkIndex = 0;

    // Kick off one read; onload chains the next until all chunks are consumed.
    const readNext = () => {
      const start = chunkIndex * CHUNK_SIZE;
      const end = Math.min(start + CHUNK_SIZE, file.size);
      reader.readAsArrayBuffer(file.slice(start, end));
    };

    reader.onerror = reject;
    reader.onload = (event) => {
      hasher.append(event.target.result);
      chunkIndex += 1;
      if (chunkIndex < totalChunks) {
        readNext();
      } else {
        resolve(hasher.end());
      }
    };

    readNext();
  });
}
// 使用 Web Crypto API(现代浏览器)
async function calculateFileHashWithCrypto(file) {
const chunkSize = 2 * 1024 * 1024; // 2MB
const chunks = Math.ceil(file.size / chunkSize);
const hashBuffer = await crypto.subtle.digest('SHA-256', await file.arrayBuffer());
const hashArray = Array.from(new Uint8Array(hashBuffer));
return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
}3.3 上传单个片段
/**
* 上传文件片段
* @param {Blob} chunk - 文件片段
* @param {number} chunkIndex - 片段索引
* @param {string} fileHash - 文件 Hash
* @param {string} fileName - 文件名称
* @param {number} totalChunks - 总片段数
* @returns {Promise<void>}
*/
async function uploadChunk(chunk, chunkIndex, fileHash, fileName, totalChunks) {
const formData = new FormData();
formData.append('chunk', chunk);
formData.append('chunkIndex', chunkIndex);
formData.append('fileHash', fileHash);
formData.append('fileName', fileName);
formData.append('totalChunks', totalChunks);
return fetch('/api/upload/chunk', {
method: 'POST',
body: formData,
}).then(response => {
if (!response.ok) {
throw new Error(`Upload failed: ${response.statusText}`);
}
return response.json();
});
}3.4 并发上传控制
/**
 * Upload file chunks with bounded concurrency and resume support.
 * Queries the server for already-uploaded chunk indexes, uploads only the
 * missing ones via a pool of worker loops, and triggers the server-side
 * merge once everything is present.
 * Fixes vs the original: removed the unused `uploadPromises` accumulator,
 * removed the misleading "重试逻辑" comment (failures simply propagate to
 * the caller), and replaced the recursive uploadNext with a worker loop.
 * @param {Array<Blob>} chunks - file chunk array
 * @param {string} fileHash - file content hash
 * @param {string} fileName - original file name
 * @param {number} concurrency - number of parallel uploads (default 3)
 * @param {Function} [onProgress] - called with { uploaded, total, percentage }
 * @returns {Promise<void>}
 */
async function uploadChunks(chunks, fileHash, fileName, concurrency = 3, onProgress) {
  const totalChunks = chunks.length;
  const uploadedIndexes = new Set();

  // Resume support: ask the server which chunks it already has.
  const checkResponse = await fetch(`/api/upload/check?fileHash=${fileHash}`);
  const { uploadedIndexes: serverUploadedIndexes } = await checkResponse.json();
  serverUploadedIndexes.forEach((index) => uploadedIndexes.add(index));
  let uploadedChunks = uploadedIndexes.size;

  const reportProgress = () => {
    onProgress?.({
      uploaded: uploadedChunks,
      total: totalChunks,
      percentage: (uploadedChunks / totalChunks) * 100,
    });
  };
  reportProgress();

  // Only the chunks the server does not have yet.
  const pendingTasks = chunks
    .map((chunk, index) => ({ chunk, index }))
    .filter(({ index }) => !uploadedIndexes.has(index));

  // Worker loop: each worker pulls the next pending task until none remain.
  let cursor = 0;
  const runWorker = async () => {
    while (cursor < pendingTasks.length) {
      const { chunk, index } = pendingTasks[cursor++];
      try {
        await uploadChunk(chunk, index, fileHash, fileName, totalChunks);
      } catch (error) {
        console.error(`Chunk ${index} upload failed:`, error);
        throw error; // no retry here — the caller decides how to recover
      }
      uploadedIndexes.add(index);
      uploadedChunks++;
      reportProgress();
    }
  };

  await Promise.all(Array.from({ length: concurrency }, runWorker));

  // All chunks present — ask the server to assemble the final file.
  if (uploadedChunks === totalChunks) {
    await mergeChunks(fileHash, fileName);
  }
}
/**
* 合并文件片段
* @param {string} fileHash - 文件 Hash
* @param {string} fileName - 文件名称
* @returns {Promise<void>}
*/
async function mergeChunks(fileHash, fileName) {
return fetch('/api/upload/merge', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
fileHash,
fileName,
}),
}).then(response => {
if (!response.ok) {
throw new Error(`Merge failed: ${response.statusText}`);
}
return response.json();
});
}3.5 进度监控
/**
* 上传进度监控
*/
class UploadProgress {
constructor() {
this.uploaded = 0;
this.total = 0;
this.speed = 0; // 字节/秒
this.remainingTime = 0; // 秒
this.startTime = Date.now();
this.lastUpdateTime = Date.now();
this.lastUploaded = 0;
}
update(uploaded, total) {
const now = Date.now();
const timeDelta = (now - this.lastUpdateTime) / 1000; // 秒
const uploadedDelta = uploaded - this.lastUploaded;
if (timeDelta > 0) {
this.speed = uploadedDelta / timeDelta;
const remaining = total - uploaded;
this.remainingTime = remaining / this.speed;
}
this.uploaded = uploaded;
this.total = total;
this.lastUpdateTime = now;
this.lastUploaded = uploaded;
}
getPercentage() {
return this.total > 0 ? (this.uploaded / this.total) * 100 : 0;
}
formatSpeed() {
if (this.speed < 1024) {
return `${this.speed.toFixed(2)} B/s`;
} else if (this.speed < 1024 * 1024) {
return `${(this.speed / 1024).toFixed(2)} KB/s`;
} else {
return `${(this.speed / (1024 * 1024)).toFixed(2)} MB/s`;
}
}
formatRemainingTime() {
if (this.remainingTime < 60) {
return `${Math.ceil(this.remainingTime)} 秒`;
} else if (this.remainingTime < 3600) {
return `${Math.ceil(this.remainingTime / 60)} 分钟`;
} else {
return `${Math.ceil(this.remainingTime / 3600)} 小时`;
}
}
}4. 完整 Demo
4.1 HTML 结构
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>大文件上传 Demo</title>
<style>
.upload-container {
max-width: 800px;
margin: 50px auto;
padding: 20px;
border: 1px solid #ddd;
border-radius: 8px;
}
.upload-area {
border: 2px dashed #ccc;
border-radius: 8px;
padding: 40px;
text-align: center;
cursor: pointer;
transition: border-color 0.3s;
}
.upload-area:hover {
border-color: #1890ff;
}
.upload-area.dragover {
border-color: #1890ff;
background-color: #f0f7ff;
}
.file-input {
display: none;
}
.file-list {
margin-top: 20px;
}
.file-item {
padding: 15px;
border: 1px solid #eee;
border-radius: 4px;
margin-bottom: 10px;
}
.file-info {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 10px;
}
.file-name {
font-weight: bold;
}
.file-size {
color: #999;
}
.progress-bar {
width: 100%;
height: 20px;
background-color: #f0f0f0;
border-radius: 10px;
overflow: hidden;
margin-bottom: 5px;
}
.progress-fill {
height: 100%;
background-color: #1890ff;
transition: width 0.3s;
}
.progress-info {
display: flex;
justify-content: space-between;
font-size: 12px;
color: #666;
}
.btn {
padding: 8px 16px;
border: none;
border-radius: 4px;
cursor: pointer;
margin-right: 10px;
}
.btn-primary {
background-color: #1890ff;
color: white;
}
.btn-danger {
background-color: #ff4d4f;
color: white;
}
</style>
</head>
<body>
<div class="upload-container">
<h1>大文件上传 Demo</h1>
<div class="upload-area" id="uploadArea">
<p>点击或拖拽文件到此区域上传</p>
<input type="file" id="fileInput" class="file-input" multiple>
</div>
<div class="file-list" id="fileList"></div>
</div>
<script src="https://cdn.jsdelivr.net/npm/spark-md5@3.0.2/spark-md5.min.js"></script>
<script src="upload.js"></script>
</body>
</html>
4.2 JavaScript 实现
// upload.js
/**
 * Browser-side large file uploader: chunked upload with hash-based instant
 * upload ("秒传"), resume support, bounded concurrency (via Semaphore) and
 * DOM progress rendering.
 * Fixes vs the original: file names are HTML-escaped before being injected
 * into innerHTML (XSS via crafted filenames), reported uploaded bytes are
 * capped at the real file size, and a missing `uploadedIndexes` field from
 * the check endpoint no longer throws.
 */
class LargeFileUploader {
  /**
   * @param {Object} [options]
   * @param {number} [options.chunkSize] - chunk size in bytes (default 2MB)
   * @param {number} [options.concurrency] - parallel chunk uploads (default 3)
   * @param {string} [options.uploadUrl] - chunk upload endpoint
   * @param {string} [options.mergeUrl] - merge endpoint
   * @param {string} [options.checkUrl] - existence/resume check endpoint
   */
  constructor(options = {}) {
    this.chunkSize = options.chunkSize || 2 * 1024 * 1024; // 2MB
    this.concurrency = options.concurrency || 3;
    this.uploadUrl = options.uploadUrl || '/api/upload/chunk';
    this.mergeUrl = options.mergeUrl || '/api/upload/merge';
    this.checkUrl = options.checkUrl || '/api/upload/check';
    this.files = new Map(); // fileId -> fileItem
  }

  /**
   * Wire up click, file-input and drag-and-drop handlers on the page.
   */
  init() {
    const uploadArea = document.getElementById('uploadArea');
    const fileInput = document.getElementById('fileInput');
    // Clicking the drop zone opens the file picker.
    uploadArea.addEventListener('click', () => {
      fileInput.click();
    });
    fileInput.addEventListener('change', (e) => {
      this.handleFiles(Array.from(e.target.files));
    });
    // preventDefault on dragover is required for 'drop' to fire.
    uploadArea.addEventListener('dragover', (e) => {
      e.preventDefault();
      uploadArea.classList.add('dragover');
    });
    uploadArea.addEventListener('dragleave', () => {
      uploadArea.classList.remove('dragover');
    });
    uploadArea.addEventListener('drop', (e) => {
      e.preventDefault();
      uploadArea.classList.remove('dragover');
      this.handleFiles(Array.from(e.dataTransfer.files));
    });
  }

  /**
   * Register a batch of files, one after another.
   * @param {File[]} files
   */
  async handleFiles(files) {
    for (const file of files) {
      await this.addFile(file);
    }
  }

  /**
   * Register a file (deduplicated by name/size/mtime) and start its upload.
   * @param {File} file
   */
  async addFile(file) {
    const fileId = `${file.name}-${file.size}-${file.lastModified}`;
    if (this.files.has(fileId)) {
      console.warn('File already exists:', file.name);
      return;
    }
    const fileItem = {
      id: fileId,
      file,
      status: 'pending', // pending | hashing | uploading | success | error
      progress: 0, // percentage 0-100
      speed: 0, // bytes/second
      remainingTime: 0, // seconds
      hash: null,
      chunks: null,
    };
    this.files.set(fileId, fileItem);
    this.renderFileItem(fileItem);
    // Fire-and-forget: errors are handled inside uploadFile.
    this.uploadFile(fileItem);
  }

  /**
   * Full upload pipeline for one file:
   * hash -> server check (instant upload) -> slice -> upload chunks -> merge.
   * @param {Object} fileItem - entry created by addFile()
   */
  async uploadFile(fileItem) {
    try {
      // 1. Hash the file (its server-side identity).
      fileItem.status = 'hashing';
      this.updateFileItem(fileItem);
      fileItem.hash = await this.calculateFileHash(fileItem.file);
      // 2. Instant upload: skip if the server already has this content.
      const checkResult = await this.checkFile(fileItem.hash);
      if (checkResult.exists) {
        fileItem.status = 'success';
        fileItem.progress = 100;
        this.updateFileItem(fileItem);
        return;
      }
      // 3. Slice into chunks.
      fileItem.chunks = this.createFileChunks(fileItem.file);
      // 4. Upload chunks with concurrency control.
      fileItem.status = 'uploading';
      this.updateFileItem(fileItem);
      const progress = new UploadProgress();
      await this.uploadChunks(
        fileItem.chunks,
        fileItem.hash,
        fileItem.file.name,
        (progressData) => {
          // Cap at the real file size: the last chunk is usually smaller
          // than chunkSize, so uploaded * chunkSize can overshoot.
          const uploadedBytes = Math.min(
            progressData.uploaded * this.chunkSize,
            fileItem.file.size
          );
          progress.update(uploadedBytes, fileItem.file.size);
          fileItem.progress = progress.getPercentage();
          fileItem.speed = progress.speed;
          fileItem.remainingTime = progress.remainingTime;
          this.updateFileItem(fileItem);
        }
      );
      // 5. Ask the server to assemble the chunks.
      await this.mergeChunks(fileItem.hash, fileItem.file.name);
      fileItem.status = 'success';
      fileItem.progress = 100;
      this.updateFileItem(fileItem);
    } catch (error) {
      console.error('Upload failed:', error);
      fileItem.status = 'error';
      fileItem.error = error.message;
      this.updateFileItem(fileItem);
    }
  }

  /**
   * Incrementally MD5-hash the file with SparkMD5, 2MB at a time.
   * @param {File} file
   * @returns {Promise<string>} hex MD5 digest
   */
  async calculateFileHash(file) {
    return new Promise((resolve, reject) => {
      const chunkSize = 2 * 1024 * 1024; // 2MB read window
      const chunks = Math.ceil(file.size / chunkSize);
      const spark = new SparkMD5.ArrayBuffer();
      const fileReader = new FileReader();
      let currentChunk = 0;
      fileReader.onload = (e) => {
        spark.append(e.target.result);
        currentChunk++;
        if (currentChunk < chunks) {
          loadNext();
        } else {
          resolve(spark.end());
        }
      };
      fileReader.onerror = reject;
      function loadNext() {
        const start = currentChunk * chunkSize;
        const end = Math.min(start + chunkSize, file.size);
        fileReader.readAsArrayBuffer(file.slice(start, end));
      }
      loadNext();
    });
  }

  /**
   * Ask the server whether it already has this file and/or which chunk
   * indexes were uploaded previously.
   * @param {string} fileHash
   * @returns {Promise<Object>} e.g. { exists, uploadedIndexes?, filePath? }
   */
  async checkFile(fileHash) {
    const response = await fetch(`${this.checkUrl}?fileHash=${fileHash}`);
    return response.json();
  }

  /**
   * Slice the file into this.chunkSize pieces.
   * @param {File} file
   * @returns {Array<Blob>}
   */
  createFileChunks(file) {
    const chunks = [];
    let start = 0;
    while (start < file.size) {
      const end = Math.min(start + this.chunkSize, file.size);
      chunks.push(file.slice(start, end));
      start = end;
    }
    return chunks;
  }

  /**
   * Upload all chunks with resume support and bounded concurrency.
   * @param {Array<Blob>} chunks
   * @param {string} fileHash
   * @param {string} fileName
   * @param {Function} [onProgress] - called with { uploaded, total }
   */
  async uploadChunks(chunks, fileHash, fileName, onProgress) {
    const totalChunks = chunks.length;
    let uploadedChunks = 0;
    const uploadedIndexes = new Set();
    // Resume: ask which chunks the server already has. A failure here is
    // non-fatal — we simply re-upload everything.
    try {
      const response = await fetch(`${this.checkUrl}?fileHash=${fileHash}`);
      const { uploadedIndexes: serverUploadedIndexes } = await response.json();
      // Guard against endpoints that omit the field entirely.
      (serverUploadedIndexes || []).forEach((index) => uploadedIndexes.add(index));
      uploadedChunks = uploadedIndexes.size;
      onProgress?.({ uploaded: uploadedChunks, total: totalChunks });
    } catch (error) {
      console.warn('Failed to check uploaded chunks:', error);
    }
    // Only the chunks the server does not have yet.
    const uploadTasks = chunks
      .map((chunk, index) => ({ chunk, index }))
      .filter(({ index }) => !uploadedIndexes.has(index));
    // Bounded concurrency via a counting semaphore.
    const semaphore = new Semaphore(this.concurrency);
    const uploadPromises = uploadTasks.map(({ chunk, index }) =>
      semaphore.acquire().then(async (release) => {
        try {
          await this.uploadChunk(chunk, index, fileHash, fileName, totalChunks);
          uploadedIndexes.add(index);
          uploadedChunks++;
          onProgress?.({ uploaded: uploadedChunks, total: totalChunks });
        } finally {
          release();
        }
      })
    );
    await Promise.all(uploadPromises);
  }

  /**
   * POST one chunk as multipart/form-data.
   * @throws {Error} on a non-2xx response
   */
  async uploadChunk(chunk, chunkIndex, fileHash, fileName, totalChunks) {
    const formData = new FormData();
    formData.append('chunk', chunk);
    formData.append('chunkIndex', chunkIndex);
    formData.append('fileHash', fileHash);
    formData.append('fileName', fileName);
    formData.append('totalChunks', totalChunks);
    const response = await fetch(this.uploadUrl, {
      method: 'POST',
      body: formData,
    });
    if (!response.ok) {
      throw new Error(`Upload failed: ${response.statusText}`);
    }
    return response.json();
  }

  /**
   * Ask the server to assemble the uploaded chunks.
   * @throws {Error} on a non-2xx response
   */
  async mergeChunks(fileHash, fileName) {
    const response = await fetch(this.mergeUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ fileHash, fileName }),
    });
    if (!response.ok) {
      throw new Error(`Merge failed: ${response.statusText}`);
    }
    return response.json();
  }

  /**
   * Create and append the DOM row for a file.
   * The file name is HTML-escaped before interpolation into innerHTML —
   * the original injected it raw, an XSS vector via crafted filenames.
   */
  renderFileItem(fileItem) {
    const fileList = document.getElementById('fileList');
    const fileItemEl = document.createElement('div');
    fileItemEl.className = 'file-item';
    fileItemEl.id = `file-${fileItem.id}`;
    fileItemEl.innerHTML = `
      <div class="file-info">
        <span class="file-name">${this.escapeHtml(fileItem.file.name)}</span>
        <span class="file-size">${this.formatFileSize(fileItem.file.size)}</span>
      </div>
      <div class="progress-bar">
        <div class="progress-fill" style="width: ${fileItem.progress}%"></div>
      </div>
      <div class="progress-info">
        <span>${fileItem.status}</span>
        <span>${fileItem.progress.toFixed(2)}%</span>
        <span>${this.formatSpeed(fileItem.speed)}</span>
        <span>${this.formatRemainingTime(fileItem.remainingTime)}</span>
      </div>
    `;
    fileList.appendChild(fileItemEl);
  }

  /**
   * Refresh the progress bar and status line of an existing file row.
   */
  updateFileItem(fileItem) {
    const fileItemEl = document.getElementById(`file-${fileItem.id}`);
    if (!fileItemEl) return;
    const progressFill = fileItemEl.querySelector('.progress-fill');
    const progressInfo = fileItemEl.querySelector('.progress-info');
    if (progressFill) {
      progressFill.style.width = `${fileItem.progress}%`;
    }
    if (progressInfo) {
      progressInfo.innerHTML = `
        <span>${fileItem.status}</span>
        <span>${fileItem.progress.toFixed(2)}%</span>
        <span>${this.formatSpeed(fileItem.speed)}</span>
        <span>${this.formatRemainingTime(fileItem.remainingTime)}</span>
      `;
    }
  }

  /**
   * Escape text for safe interpolation into an HTML template string.
   * @param {string} text
   * @returns {string}
   */
  escapeHtml(text) {
    const map = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' };
    return String(text).replace(/[&<>"']/g, (ch) => map[ch]);
  }

  /**
   * Human-readable file size (B / KB / MB / GB).
   * @param {number} bytes
   * @returns {string}
   */
  formatFileSize(bytes) {
    if (bytes < 1024) return bytes + ' B';
    if (bytes < 1024 * 1024) return (bytes / 1024).toFixed(2) + ' KB';
    if (bytes < 1024 * 1024 * 1024) return (bytes / (1024 * 1024)).toFixed(2) + ' MB';
    return (bytes / (1024 * 1024 * 1024)).toFixed(2) + ' GB';
  }

  /**
   * Human-readable transfer speed.
   * @param {number} speed - bytes per second
   * @returns {string}
   */
  formatSpeed(speed) {
    if (speed < 1024) return speed.toFixed(2) + ' B/s';
    if (speed < 1024 * 1024) return (speed / 1024).toFixed(2) + ' KB/s';
    return (speed / (1024 * 1024)).toFixed(2) + ' MB/s';
  }

  /**
   * Human-readable remaining-time estimate.
   * @param {number} seconds
   * @returns {string}
   */
  formatRemainingTime(seconds) {
    if (seconds < 60) return Math.ceil(seconds) + ' 秒';
    if (seconds < 3600) return Math.ceil(seconds / 60) + ' 分钟';
    return Math.ceil(seconds / 3600) + ' 小时';
  }
}
/**
 * Counting semaphore used to cap the number of concurrent uploads.
 * acquire() resolves with a release() callback; invoke it when the
 * protected work is finished.
 */
class Semaphore {
  constructor(count) {
    this.count = count; // free slots
    this.waiting = []; // queued grants waiting for a slot
  }

  /**
   * Wait until a slot is free.
   * @returns {Promise<Function>} resolves with the matching release()
   */
  acquire() {
    return new Promise((resolve) => {
      // On release, hand the slot straight to the next waiter if there is
      // one; otherwise return it to the pool.
      const release = () => {
        if (this.waiting.length > 0) {
          this.waiting.shift()();
        } else {
          this.count++;
        }
      };
      const grant = () => resolve(release);
      if (this.count > 0) {
        this.count--;
        grant();
      } else {
        this.waiting.push(grant);
      }
    });
  }
}
/**
 * Tracks upload progress, transfer speed and estimated time remaining.
 * (Lightweight variant of the UploadProgress defined in section 3.5.)
 */
class UploadProgress {
  constructor() {
    this.uploaded = 0; // bytes uploaded so far
    this.total = 0; // total bytes
    this.speed = 0; // bytes per second
    this.remainingTime = 0; // seconds
    this.lastUpdateTime = Date.now();
    this.lastUploaded = 0;
  }

  /**
   * Record a progress sample and refresh speed / ETA.
   * @param {number} uploaded - cumulative bytes uploaded
   * @param {number} total - total bytes
   */
  update(uploaded, total) {
    const now = Date.now();
    const timeDelta = (now - this.lastUpdateTime) / 1000;
    const uploadedDelta = uploaded - this.lastUploaded;
    if (timeDelta > 0) {
      this.speed = uploadedDelta / timeDelta;
      const remaining = total - uploaded;
      // Avoid Infinity/NaN when no bytes moved since the last sample
      // (the original divided by zero here).
      this.remainingTime = this.speed > 0 ? remaining / this.speed : 0;
    }
    this.uploaded = uploaded;
    this.total = total;
    this.lastUpdateTime = now;
    this.lastUploaded = uploaded;
  }

  /** @returns {number} completion percentage (0-100) */
  getPercentage() {
    return this.total > 0 ? (this.uploaded / this.total) * 100 : 0;
  }
}
// 初始化
const uploader = new LargeFileUploader({
chunkSize: 2 * 1024 * 1024, // 2MB
concurrency: 3, // 并发数
uploadUrl: '/api/upload/chunk',
mergeUrl: '/api/upload/merge',
checkUrl: '/api/upload/check',
});
uploader.init();4.3 后端接口示例(Node.js)
// server.js (Express)
const express = require('express');
const multer = require('multer');
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const app = express();
const upload = multer({ dest: 'uploads/chunks/' });
// 存储文件信息
const fileInfo = new Map();
// 上传文件片段
app.post('/api/upload/chunk', upload.single('chunk'), (req, res) => {
const { chunkIndex, fileHash, fileName, totalChunks } = req.body;
const chunk = req.file;
// 保存文件信息
if (!fileInfo.has(fileHash)) {
fileInfo.set(fileHash, {
fileName,
totalChunks: parseInt(totalChunks),
chunks: new Set(),
});
}
const info = fileInfo.get(fileHash);
info.chunks.add(parseInt(chunkIndex));
// 重命名文件片段
const chunkPath = path.join('uploads/chunks', `${fileHash}-${chunkIndex}`);
fs.renameSync(chunk.path, chunkPath);
res.json({
success: true,
chunkIndex: parseInt(chunkIndex),
});
});
// 检查文件是否存在
app.get('/api/upload/check', (req, res) => {
const { fileHash } = req.query;
// 检查文件是否已存在
const filePath = path.join('uploads', fileHash);
if (fs.existsSync(filePath)) {
return res.json({
exists: true,
filePath,
});
}
// 检查已上传的片段
const info = fileInfo.get(fileHash);
if (info) {
return res.json({
exists: false,
uploadedIndexes: Array.from(info.chunks),
});
}
res.json({
exists: false,
uploadedIndexes: [],
});
});
// 合并文件片段
app.post('/api/upload/merge', (req, res) => {
const { fileHash, fileName } = req.body;
const info = fileInfo.get(fileHash);
if (!info || info.chunks.size !== info.totalChunks) {
return res.status(400).json({
success: false,
message: 'Not all chunks uploaded',
});
}
// 合并文件片段
const filePath = path.join('uploads', fileHash);
const writeStream = fs.createWriteStream(filePath);
for (let i = 0; i < info.totalChunks; i++) {
const chunkPath = path.join('uploads/chunks', `${fileHash}-${i}`);
const chunkData = fs.readFileSync(chunkPath);
writeStream.write(chunkData);
fs.unlinkSync(chunkPath); // 删除片段
}
writeStream.end();
// 删除文件信息
fileInfo.delete(fileHash);
res.json({
success: true,
filePath,
});
});
app.listen(3000, () => {
console.log('Server running on http://localhost:3000');
});5. 进阶优化
5.1 Web Workers 计算 Hash
// hash-worker.js — runs MD5 hashing off the main thread.
// SparkMD5 is not defined inside a worker unless loaded explicitly; the
// original omitted this and would throw a ReferenceError on first use.
importScripts('https://cdn.jsdelivr.net/npm/spark-md5@3.0.2/spark-md5.min.js');
self.onmessage = function (e) {
  const { file, chunkSize } = e.data;
  const chunks = Math.ceil(file.size / chunkSize);
  const spark = new SparkMD5.ArrayBuffer();
  const fileReader = new FileReader();
  let currentChunk = 0;
  // Append each slice to the incremental hash and report progress.
  fileReader.onload = function (e) {
    spark.append(e.target.result);
    currentChunk++;
    self.postMessage({
      progress: (currentChunk / chunks) * 100,
    });
    if (currentChunk < chunks) {
      loadNext();
    } else {
      // Final message carries the digest.
      self.postMessage({
        hash: spark.end(),
        progress: 100,
      });
    }
  };
  fileReader.onerror = function (error) {
    self.postMessage({
      error: error.message,
    });
  };
  function loadNext() {
    const start = currentChunk * chunkSize;
    const end = Math.min(start + chunkSize, file.size);
    fileReader.readAsArrayBuffer(file.slice(start, end));
  }
  loadNext();
};
// 使用
const worker = new Worker('hash-worker.js');
worker.postMessage({ file, chunkSize: 2 * 1024 * 1024 });
worker.onmessage = function(e) {
const { hash, progress, error } = e.data;
if (error) {
console.error('Hash calculation failed:', error);
} else if (hash) {
console.log('File hash:', hash);
} else {
console.log('Progress:', progress);
}
};5.2 使用 Streams API
/**
* 使用 Streams API 上传文件
*/
async function uploadFileWithStream(file) {
const reader = file.stream().getReader();
const fileHash = await calculateFileHash(file);
let chunkIndex = 0;
while (true) {
const { done, value } = await reader.read();
if (done) break;
const chunk = new Blob([value]);
await uploadChunk(chunk, chunkIndex++, fileHash, file.name);
}
}5.3 压缩上传
/**
* 压缩后上传
*/
async function uploadCompressedFile(file) {
// 使用 compression-streams API(实验性)
const compressionStream = new CompressionStream('gzip');
const compressedStream = file.stream().pipeThrough(compressionStream);
const compressedFile = await new Response(compressedStream).blob();
// 上传压缩后的文件
await uploadFile(compressedFile);
}5.4 图片预览和压缩
/**
* 图片预览和压缩
*/
function compressImage(file, maxWidth = 1920, quality = 0.8) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (e) => {
const img = new Image();
img.onload = () => {
const canvas = document.createElement('canvas');
let width = img.width;
let height = img.height;
if (width > maxWidth) {
height = (height * maxWidth) / width;
width = maxWidth;
}
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d');
ctx.drawImage(img, 0, 0, width, height);
canvas.toBlob(resolve, 'image/jpeg', quality);
};
img.onerror = reject;
img.src = e.target.result;
};
reader.onerror = reject;
reader.readAsDataURL(file);
});
}6. 最佳实践
6.1 错误处理
/**
* 重试机制
*/
async function uploadChunkWithRetry(chunk, chunkIndex, fileHash, fileName, totalChunks, maxRetries = 3) {
let retries = 0;
while (retries < maxRetries) {
try {
await uploadChunk(chunk, chunkIndex, fileHash, fileName, totalChunks);
return;
} catch (error) {
retries++;
if (retries >= maxRetries) {
throw error;
}
await new Promise(resolve => setTimeout(resolve, 1000 * retries)); // 指数退避
}
}
}6.2 取消上传
/**
* 可取消的上传
*/
class CancellableUpload {
constructor() {
this.cancelled = false;
this.abortController = new AbortController();
}
cancel() {
this.cancelled = true;
this.abortController.abort();
}
async uploadChunk(chunk, chunkIndex, fileHash, fileName, totalChunks) {
if (this.cancelled) {
throw new Error('Upload cancelled');
}
const formData = new FormData();
formData.append('chunk', chunk);
formData.append('chunkIndex', chunkIndex);
formData.append('fileHash', fileHash);
formData.append('fileName', fileName);
formData.append('totalChunks', totalChunks);
return fetch('/api/upload/chunk', {
method: 'POST',
body: formData,
signal: this.abortController.signal,
});
}
}6.3 性能优化
- 合理设置分片大小:根据网络状况动态调整
- 控制并发数:避免过多并发请求
- 使用 Web Workers:将 Hash 计算移到 Worker 线程
- 使用 IndexedDB:缓存文件信息和上传状态
- 使用 Service Worker:支持离线上传
6.4 安全性
- 文件类型验证:检查文件 MIME 类型和扩展名
- 文件大小限制:限制单个文件和总文件大小
- 文件名过滤:防止路径遍历攻击
- HTTPS:使用 HTTPS 传输文件
- Token 验证:上传前验证用户身份
🔗 相关链接
前置知识
- File API — 文件对象操作
- Blob API — 二进制大对象
- FileReader API — 文件读取
- FormData API — 表单数据和文件上传
- Fetch API — 网络请求
进阶学习
- Web Workers — 多线程处理
- IndexedDB — 客户端存储
- Service Worker — 离线支持
最后更新:2025
维护规范:每次更新后,同步更新相关 API 文档
前端 文件上传 大文件上传 分片上传 断点续传 FileAPI BlobAPI FileReaderAPI StreamsAPI