关于解决大文件上传超时的问题

场景

项目有个大文件上传的功能,上传几百MB的文件总是超时

一开始以为是Nginx的问题,后来发现是多个地方都要配置

记录一下解决过程

问题分析

大文件上传超时可能有几个原因:

  1. Nginx限制
  2. SpringBoot限制
  3. 网络超时
  4. 浏览器超时

解决方案

1. Nginx配置

http {
# Maximum allowed size of the client request body (i.e. the uploaded file)
client_max_body_size 500M;

# Timeout for reading the client request body
client_body_timeout 300s;

# Timeout for reading the client request header
client_header_timeout 300s;

# Timeout for transmitting a response back to the client
# (NOTE: the original comment called this a "read timeout" — send_timeout
# actually governs writes between two successive sends to the client)
send_timeout 300s;

server {
listen 80;
server_name example.com;

# Upstream proxy timeouts for the upload endpoint
location /upload {
proxy_pass http://backend;
proxy_connect_timeout 300s;
proxy_send_timeout 300s;
proxy_read_timeout 300s;

# Stream the request body to the upstream instead of buffering it on disk first
proxy_request_buffering off;
}
}
}

2. SpringBoot配置

spring:
  servlet:
    multipart:
      enabled: true
      # Maximum size of a single uploaded file
      max-file-size: 500MB
      # Maximum size of the whole multipart request
      max-request-size: 500MB
      # Size threshold after which uploads are written to disk instead of
      # kept in memory (0 = always spool to disk). The original comment
      # ("whether to resolve lazily") described a different property.
      file-size-threshold: 0

server:
  tomcat:
    # Tomcat connection timeout in ms. NOTE: the top-level
    # `server.connection-timeout` key was deprecated in Spring Boot 2.1 and
    # later removed; the Tomcat-specific key below is the supported one.
    connection-timeout: 300000
    # How long Tomcat keeps an idle keep-alive connection open (ms)
    keep-alive-timeout: 300000

3. 分片上传(推荐)

对于大文件,最好使用分片上传

前端实现

/**
 * Chunked uploader: splits a File/Blob into fixed-size chunks, uploads them
 * sequentially, then asks the server to merge them.
 */
class FileUploader {
  /**
   * @param {File|Blob} file - file to upload (only `size`, `slice`, `name` are used)
   * @param {number} [chunkSize] - chunk size in bytes (default 10 MiB)
   */
  constructor(file, chunkSize = 10 * 1024 * 1024) {
    this.file = file;
    this.chunkSize = chunkSize;
    this.chunks = Math.ceil(file.size / chunkSize);
    this.uploadedChunks = 0;
  }

  /**
   * Upload every chunk in order, then trigger a server-side merge.
   * @param {string} url - base upload endpoint
   * @returns {Promise<string>} the generated file id
   */
  async upload(url) {
    const fileId = this.generateFileId();

    for (let i = 0; i < this.chunks; i++) {
      const start = i * this.chunkSize;
      const end = Math.min(start + this.chunkSize, this.file.size);
      const chunk = this.file.slice(start, end);

      await this.uploadChunk(url, fileId, i, chunk);
      this.uploadedChunks++;

      console.log(`上传进度: ${((this.uploadedChunks / this.chunks) * 100).toFixed(2)}%`);
    }

    // All chunks are up; ask the server to stitch them together.
    await this.mergeChunks(url, fileId);

    return fileId;
  }

  /** POST one chunk as multipart/form-data. */
  uploadChunk(url, fileId, chunkIndex, chunk) {
    const formData = new FormData();
    formData.append('fileId', fileId);
    formData.append('chunkIndex', chunkIndex);
    formData.append('totalChunks', this.chunks);
    formData.append('chunk', chunk);

    // BUG FIX: do NOT set the Content-Type header by hand. A hand-written
    // 'multipart/form-data' value lacks the `boundary` parameter, so the
    // server cannot parse the body. Leaving it unset lets the browser/axios
    // generate the header with the correct boundary for this FormData.
    return axios.post(url + '/chunk', formData, {
      timeout: 60000 // per-chunk timeout (ms)
    });
  }

  /** Tell the server all chunks are uploaded and can be merged. */
  mergeChunks(url, fileId) {
    return axios.post(url + '/merge', {
      fileId: fileId,
      fileName: this.file.name,
      totalChunks: this.chunks
    });
  }

  // Timestamp + random base-36 suffix; not collision-proof, but adequate as
  // a per-session upload id. (crypto.randomUUID() would be stronger.)
  generateFileId() {
    // String.prototype.substr is deprecated — slice(2, 11) is equivalent here.
    return `${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }
}

// Example usage: start a chunked upload as soon as a file is chosen.
const fileInput = document.getElementById('fileInput');
fileInput.addEventListener('change', async (event) => {
  const selectedFile = event.target.files[0];
  const uploader = new FileUploader(selectedFile);

  try {
    const fileId = await uploader.upload('/api/upload');
    console.log('上传成功:', fileId);
  } catch (error) {
    console.error('上传失败:', error);
  }
});

后端实现

/**
 * Chunked-upload endpoints: store individual chunks under
 * UPLOAD_DIR/{fileId}/{chunkIndex}, then merge them on request.
 */
@RestController
@RequestMapping("/api/upload")
public class FileUploadController {

    private static final String UPLOAD_DIR = "/tmp/uploads";

    /**
     * True when a client-supplied identifier could escape UPLOAD_DIR
     * (path traversal via "..", "/" or "\") or is empty.
     */
    private static boolean isUnsafeName(String name) {
        return name == null || name.isEmpty()
                || name.contains("..") || name.contains("/") || name.contains("\\");
    }

    /**
     * Stores one chunk as UPLOAD_DIR/{fileId}/{chunkIndex}.
     */
    @PostMapping("/chunk")
    public ResponseResult uploadChunk(
            @RequestParam("fileId") String fileId,
            @RequestParam("chunkIndex") Integer chunkIndex,
            @RequestParam("totalChunks") Integer totalChunks,
            @RequestParam("chunk") MultipartFile chunk) {

        // SECURITY FIX: fileId is client-controlled and becomes a directory
        // name — reject anything that could traverse out of UPLOAD_DIR.
        if (isUnsafeName(fileId) || chunkIndex == null || chunkIndex < 0) {
            return ResponseResult.error("分片上传失败");
        }

        try {
            File chunkDir = new File(UPLOAD_DIR, fileId);
            if (!chunkDir.exists()) {
                chunkDir.mkdirs();
            }

            // Each chunk file is named after its index so merge can read 0..n-1.
            File chunkFile = new File(chunkDir, chunkIndex.toString());
            chunk.transferTo(chunkFile);

            return ResponseResult.success("分片上传成功");
        } catch (IOException e) {
            return ResponseResult.error("分片上传失败");
        }
    }

    /**
     * Concatenates chunks 0..totalChunks-1 into UPLOAD_DIR/{fileName} and
     * deletes the chunk directory afterwards.
     */
    @PostMapping("/merge")
    public ResponseResult mergeChunks(
            @RequestParam("fileId") String fileId,
            @RequestParam("fileName") String fileName,
            @RequestParam("totalChunks") Integer totalChunks) {

        // SECURITY FIX: both values are client-controlled and end up in file
        // paths — a fileName like "../../etc/cron.d/x" must be rejected.
        if (isUnsafeName(fileId) || isUnsafeName(fileName)) {
            return ResponseResult.error("文件合并失败");
        }

        try {
            File chunkDir = new File(UPLOAD_DIR, fileId);
            File mergedFile = new File(UPLOAD_DIR, fileName);

            // ROBUSTNESS FIX: refuse to merge when any chunk is missing —
            // the original code would silently produce a corrupt file.
            for (int i = 0; i < totalChunks; i++) {
                if (!new File(chunkDir, String.valueOf(i)).exists()) {
                    return ResponseResult.error("文件合并失败");
                }
            }

            // Append chunks in index order into the final file.
            try (FileOutputStream fos = new FileOutputStream(mergedFile)) {
                for (int i = 0; i < totalChunks; i++) {
                    File chunkFile = new File(chunkDir, String.valueOf(i));
                    try (FileInputStream fis = new FileInputStream(chunkFile)) {
                        IOUtils.copy(fis, fos);
                    }
                }
            }

            // Chunks are no longer needed once the merge succeeded.
            FileUtils.deleteDirectory(chunkDir);

            return ResponseResult.success("文件合并成功", mergedFile.getAbsolutePath());
        } catch (IOException e) {
            return ResponseResult.error("文件合并失败");
        }
    }
}

4. 断点续传

为了支持断点续传,需要记录已上传的分片

前端实现

/**
 * Resumable variant of FileUploader: remembers the fileId in localStorage
 * keyed by file name + size, asks the server which chunks it already has,
 * and only uploads the missing ones.
 */
class ResumableFileUploader extends FileUploader {
  constructor(file, chunkSize = 10 * 1024 * 1024) {
    super(file, chunkSize);
    // Restore the id of a previously interrupted upload, if any.
    this.fileId = localStorage.getItem(`file_${file.name}_${file.size}`);
  }

  /**
   * Upload all missing chunks, then merge. Throws if any chunk fails so
   * that the server never merges an incomplete file.
   * @param {string} url - base upload endpoint
   * @returns {Promise<string>} the file id
   */
  async upload(url) {
    if (!this.fileId) {
      this.fileId = this.generateFileId();
      localStorage.setItem(`file_${this.file.name}_${this.file.size}`, this.fileId);
    }

    // Ask the server which chunk indexes are already stored.
    const uploadedSet = new Set(await this.getUploadedChunks(url, this.fileId));
    const failedChunks = [];

    for (let i = 0; i < this.chunks; i++) {
      if (uploadedSet.has(i)) {
        this.uploadedChunks++;
        continue;
      }

      const start = i * this.chunkSize;
      const end = Math.min(start + this.chunkSize, this.file.size);
      const chunk = this.file.slice(start, end);

      try {
        await this.uploadChunk(url, this.fileId, i, chunk);
        this.uploadedChunks++;
      } catch (error) {
        console.error(`分片${i}上传失败,稍后重试`, error);
        failedChunks.push(i);
      }
    }

    // BUG FIX: the original swallowed per-chunk failures and still called
    // mergeChunks, producing a corrupt merged file. Abort instead; the
    // localStorage entry is kept so the next attempt resumes from here.
    if (failedChunks.length > 0) {
      throw new Error(`chunks failed: ${failedChunks.join(', ')}`);
    }

    await this.mergeChunks(url, this.fileId);

    // Upload is complete — forget the resume state.
    localStorage.removeItem(`file_${this.file.name}_${this.file.size}`);

    return this.fileId;
  }

  /** Fetch the list of already-uploaded chunk indexes from the server. */
  getUploadedChunks(url, fileId) {
    return axios.get(url + '/uploaded', {
      params: { fileId }
    }).then((response) => {
      // NOTE(review): the backend wraps payloads in a ResponseResult, so the
      // index list is presumably under response.data.data — accept both
      // shapes; verify against the actual ResponseResult serialization.
      const body = response.data;
      return Array.isArray(body) ? body : (body?.data ?? []);
    });
  }
}

后端实现

/**
 * Returns the indexes of chunks already stored for {fileId}, so a client
 * can resume an interrupted upload. Returns an empty list when nothing
 * has been uploaded yet.
 */
@GetMapping("/uploaded")
public ResponseResult getUploadedChunks(@RequestParam("fileId") String fileId) {
    File chunkDir = new File(UPLOAD_DIR, fileId);
    if (!chunkDir.exists()) {
        return ResponseResult.success(Collections.emptyList());
    }

    File[] chunkFiles = chunkDir.listFiles();
    if (chunkFiles == null) {
        return ResponseResult.success(Collections.emptyList());
    }

    // ROBUSTNESS FIX: skip stray non-numeric entries (editor/OS temp files)
    // instead of letting Integer.parseInt throw, and sort the indexes —
    // File.listFiles() returns entries in no guaranteed order.
    List<Integer> uploadedChunks = Arrays.stream(chunkFiles)
            .map(File::getName)
            .filter(name -> !name.isEmpty() && name.chars().allMatch(Character::isDigit))
            .map(Integer::parseInt)
            .sorted()
            .collect(Collectors.toList());

    return ResponseResult.success(uploadedChunks);
}

5. 使用Nginx的upload module

如果追求性能,可以用Nginx的upload module:

http {
# Shared memory zone used by the upload-progress module
# NOTE(review): both upload_progress and the upload_* directives below come
# from third-party modules (nginx-upload-progress-module /
# nginx-upload-module), not stock nginx — confirm they are compiled in.
upload_progress uploads 1m;

server {
location /upload {
# nginx stores the uploaded file itself and forwards only metadata here
upload_pass /upload-backend;
# Directory where nginx writes the uploaded files
upload_store /tmp/uploads;
upload_store_access user:rw group:rw all:rw;

# Expose the stored file's name, MIME type, and on-disk path to the
# backend as ordinary form fields
upload_set_form_field $upload_field_name.name "$upload_file_name";
upload_set_form_field $upload_field_name.content_type "$upload_content_type";
upload_set_form_field $upload_field_name.path "$upload_tmp_path";

# Forward every original (non-file) form field unchanged
upload_pass_form_field "^.*$";

# Keep progress info available for 60s after the upload finishes
track_uploads uploads 60s;
}

location /upload-backend {
proxy_pass http://backend;
}
}
}

6. 进度显示

前端显示上传进度:

async uploadWithProgress(url, onProgress) {
const fileId = this.generateFileId();

for (let i = 0; i < this.chunks; i++) {
const start = i * this.chunkSize;
const end = Math.min(start + this.chunkSize, this.file.size);
const chunk = this.file.slice(start, end);

await this.uploadChunk(url, fileId, i, chunk);
this.uploadedChunks++;

if (onProgress) {
onProgress({
loaded: this.uploadedChunks * this.chunkSize,
total: this.file.size,
percentage: (this.uploadedChunks / this.chunks) * 100
});
}
}

await this.mergeChunks(url, fileId);
return fileId;
}

// Usage: render the percentage to the console and a <progress> element.
const uploader = new FileUploader(file);
await uploader.uploadWithProgress('/api/upload', ({ percentage }) => {
  console.log(`上传进度: ${percentage.toFixed(2)}%`);
  document.getElementById('progress').value = percentage;
});

总结

大文件上传主要注意:

  1. Nginx配置要放宽限制
  2. SpringBoot配置也要放开
  3. 推荐使用分片上传
  4. 实现断点续传提升用户体验
  5. 显示上传进度

暂时就先记录这么多