
feat: large file upload

master · Administrator · 2 months ago · commit 1e7280cb42
22 changed files (lines changed):

  1. README.md (43)
  2. src/main/java/org/ycloud/aipan/component/LocalFileStoreEngine.java (27)
  3. src/main/java/org/ycloud/aipan/component/MinIOFileStoreEngine.java (59)
  4. src/main/java/org/ycloud/aipan/component/OSSFileStoreEngine.java (23)
  5. src/main/java/org/ycloud/aipan/component/StoreEngine.java (49)
  6. src/main/java/org/ycloud/aipan/config/InterceptorConfig.java (19)
  7. src/main/java/org/ycloud/aipan/controller/FileController.java (51)
  8. src/main/java/org/ycloud/aipan/controller/req/FileChunkInitTaskReq.java (26)
  9. src/main/java/org/ycloud/aipan/controller/req/FileChunkMergeReq.java (17)
  10. src/main/java/org/ycloud/aipan/dto/FileChunkDTO.java (66)
  11. src/main/java/org/ycloud/aipan/interceptor/LoginInterceptor.java (3)
  12. src/main/java/org/ycloud/aipan/model/FileChunkDO.java (3)
  13. src/main/java/org/ycloud/aipan/service/AccountFileService.java (3)
  14. src/main/java/org/ycloud/aipan/service/FileChunkService.java (28)
  15. src/main/java/org/ycloud/aipan/service/impl/AccountFileServiceImpl.java (1)
  16. src/main/java/org/ycloud/aipan/service/impl/FileChunkServiceImpl.java (172)
  17. src/main/resources/application.yml (1)
  18. src/main/resources/static/fileupload.html (425)
  19. src/main/resources/static/spark-md5.min.js (1)
  20. src/test/java/org/ycloud/aipan/AmazonS3ClientTests.java (2)
  21. src/test/java/org/ycloud/aipan/BigFileUploadTest.java (150)
  22. src/test/java/org/ycloud/aipan/FileChunkUploadTests.java (158)

README.md

@@ -4244,7 +4244,6 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 }
 ```
 ### Large-file upload API development and end-to-end testing
@@ -5856,9 +5855,9 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - The market is so short of people that companies which cannot hire will lower degree requirements and raise pay; that is the opportunity
 - An associate degree or above plus backend/frontend or testing skills is enough to start learning!!
-![image-20250215115400776](file:///./img/image-20250215115400776.png?lastModify=1750210947)
+![image-20250215115400776](./img/image-20250215115400776.png?lastModify=1750210947)
-![image-20250215115329505](file:///./img/image-20250215115329505.png?lastModify=1750210947)
+![image-20250215115329505](./img/image-20250215115329505.png?lastModify=1750210947)
 - Basic profile of the learners at 小滴课堂
@@ -6121,7 +6120,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 | **Typical error types** | May drift from the instruction or generate irrelevant content | Logic holes, calculation errors, or missing steps |
 | **Resource consumption** | Usually lighter (smaller-parameter models can be deployed) | Needs larger parameter counts to support complex reasoning |
-![image-20250215170037818](file:///./img/image-20250215170037818.png?lastModify=1750210947)
+![image-20250215170037818](./img/image-20250215170037818.png?lastModify=1750210947)
 - Example application notes
@@ -6131,7 +6130,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Model selection decisions
-![image-20250215165633327](file:///./img/image-20250215165633327.png?lastModify=1750210947)
+![image-20250215165633327](./img/image-20250215165633327.png?lastModify=1750210947)
 - Example code reference
@@ -6336,7 +6335,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Test any LLM with `我今天被公司解雇了,很难过` ("I was fired by my company today and feel sad"), comparing temperature 0.2 with 0.7
-![image-20250217140255457](file:///./img/image-20250217140255457.png?lastModify=1750210947)
+![image-20250217140255457](./img/image-20250217140255457.png?lastModify=1750210947)
 - Pre-training
@@ -6358,7 +6357,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Plain-language explanation: an "AI coach" system that optimizes model output through human ratings
 - Training workflow
-![export_3t84b](file:///./img/export_3t84b.png?lastModify=1750210947)
+![export_3t84b](./img/export_3t84b.png?lastModify=1750210947)
 - Model distillation (Knowledge Distillation)
@@ -6443,7 +6442,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Many students see "token" in LLM docs and wonder what it is for, and how it differs from a JWT token
 - Many hosted LLM APIs also bill by token
-![image-20250217154619961](file:///./img/image-20250217154619961.png?lastModify=1750210947)
+![image-20250217154619961](./img/image-20250217154619961.png?lastModify=1750210947)
 - What a token is in an LLM
 - A token is the basic unit of text, used to break text into the smallest units a model can process
@@ -6492,7 +6491,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Even with a privately deployed LLM, usage still means calling APIs
 - Company, budget, data security, and project domain also drive the choice
-![image-20250217162419684](file:///./img/image-20250217162419684.png?lastModify=1750210947)
+![image-20250217162419684](./img/image-20250217162419684.png?lastModify=1750210947)
 - Comparison of the key differences
@@ -6547,7 +6546,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - The same applies to later framework integration: request protocols basically follow OpenAI's (it is the market leader), which also eases migration
 - Because of network restrictions in mainland China, OpenAI-based apps can swap in a domestic LLM directly; development is identical
-![image-20250217162403779](file:///./img/image-20250217162403779.png?lastModify=1750210947)
+![image-20250217162403779](./img/image-20250217162403779.png?lastModify=1750210947)
 - Calling LLMs with the SDK provided by OpenAI
@@ -6573,13 +6572,13 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - URL: https://bailian.console.aliyun.com/
-![image-20250217145235399](file:///./img/image-20250217145235399.png?lastModify=1750210947)
+![image-20250217145235399](./img/image-20250217145235399.png?lastModify=1750210947)
 - **DeepSeek** (深度求索)
 - URL: https://api-docs.deepseek.com/zh-cn/
-![image-20250217145923315](file:///./img/image-20250217145923315.png?lastModify=1750210947)
+![image-20250217145923315](./img/image-20250217145923315.png?lastModify=1750210947)
 - **Kimi Chat** (月之暗面)
@@ -6614,7 +6613,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Official site: https://lmstudio.ai/
 - Deployment is relatively simple; supports Windows, Mac, and Linux
-![image-20250217171804241](file:///./img/image-20250217171804241.png?lastModify=1750210947)
+![image-20250217171804241](./img/image-20250217171804241.png?lastModify=1750210947)
 - Hardware requirements for deploying DeepSeek locally
@@ -6664,7 +6663,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 | **Cost range** | ¥400,000+ | ¥20,000,000+ |
 | **Ecosystem support** | HuggingFace acceleration-library optimizations | Custom CUDA kernels + mixed-precision training |
-![image-20250217174017791](file:///./img/image-20250217174017791.png?lastModify=1750210947)
+![image-20250217174017791](./img/image-20250217174017791.png?lastModify=1750210947)
 #### Introduction to Ollama and quick local installation
@@ -6684,7 +6683,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Docs: https://github.com/ollama/ollama/blob/main/README.md#quickstart
 - Hands-on installation (download the package from the official site; the right package differs by OS)
-![image-20250217175605827](file:///./img/image-20250217175605827.png?lastModify=1750210947)
+![image-20250217175605827](./img/image-20250217175605827.png?lastModify=1750210947)
@@ -6721,7 +6720,7 @@ public JsonData list(@RequestParam(value = "parent_id")Long parentId){
 - Deploy the DeepSeek models with Ollama: deepseek-r1:7b, deepseek-r1:14b
 - Note: do not deploy the 14b model on a low-spec machine
-![image-20250217180343600](file:///./img/image-20250217180343600.png?lastModify=1750210947)
+![image-20250217180343600](./img/image-20250217180343600.png?lastModify=1750210947)
 - Hands-on deployment
@@ -6735,7 +6734,7 @@ ollama run deepseek-r1:14b
 - Prompt: `算下deeeeep里面有几个e` ("count how many e's are in deeeeep")
-![image-20250218215557400](file:///./img/image-20250218215557400.png?lastModify=1750210947)
+![image-20250218215557400](./img/image-20250218215557400.png?lastModify=1750210947)
 #### LLM GUI clients: introduction and hands-on deployment
@@ -6756,7 +6755,7 @@ ollama run deepseek-r1:14b
 - An AI chat client with multi-provider integration
 - URL: https://cherry-ai.com/
-![image-20250217183246768](file:///./img/image-20250217183246768.png?lastModify=1750210947)
+![image-20250217183246768](./img/image-20250217183246768.png?lastModify=1750210947)
 - Chatbox
@@ -6764,14 +6763,14 @@ ollama run deepseek-r1:14b
 - Configure Ollama to allow remote connections: https://chatboxai.app/zh/help-center/connect-chatbox-remote-ollama-service-guide
 - URL: https://chatboxai.app/
-![image-20250217183301366](file:///./img/image-20250217183301366.png?lastModify=1750210947)
+![image-20250217183301366](./img/image-20250217183301366.png?lastModify=1750210947)
 - Chatbox installation and hands-on use
 - Installer: download it from the official site, choosing by OS
 - Configure the LLM service
-![image-20250217184140019](file:///./img/image-20250217184140019.png?lastModify=1750210947)
+![image-20250217184140019](./img/image-20250217184140019.png?lastModify=1750210947)
@@ -6883,7 +6882,7 @@ ollama run deepseek-r1:14b
 - Reference docs: https://help.aliyun.com/zh/model-studio/developer-reference/use-qwen-by-calling-api
 - The response protocol is shown below
-![image-20250220160752180](file:///./img/image-20250220160752180.png?lastModify=1750210947)
+![image-20250220160752180](./img/image-20250220160752180.png?lastModify=1750210947)
@@ -6994,7 +6993,7 @@ ollama run deepseek-r1:14b
 - Technology-selection architecture diagram
-![image-20250221114009228](file:///./img/image-20250221114009228.png?lastModify=1750210947)
+![image-20250221114009228](./img/image-20250221114009228.png?lastModify=1750210947)
 - Python technology choices for developing with LLMs (partial list)

src/main/java/org/ycloud/aipan/component/LocalFileStoreEngine.java

@@ -1,12 +1,15 @@
 package org.ycloud.aipan.component;
-import com.amazonaws.services.s3.model.Bucket;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
+import com.amazonaws.HttpMethod;
+import com.amazonaws.services.s3.model.*;
 import jakarta.servlet.http.HttpServletResponse;
 import org.springframework.stereotype.Component;
 import org.springframework.web.multipart.MultipartFile;
+import java.net.URL;
+import java.util.Date;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 //@Component
@@ -65,4 +68,24 @@ public class LocalFileStoreEngine implements StoreEngine{
     public void download2Response(String bucketName, String objectKey, HttpServletResponse response) {
     }
+
+    @Override
+    public PartListing listMultipart(String bucketName, String objectKey, String uploadId) {
+        return null;
+    }
+
+    @Override
+    public InitiateMultipartUploadResult initMultipartUploadTask(String bucketName, String objectKey, ObjectMetadata metadata) {
+        return null;
+    }
+
+    @Override
+    public URL genePreSignedUrl(String bucketName, String objectKey, HttpMethod httpMethod, Date expiration, Map<String, Object> params) {
+        return null;
+    }
+
+    @Override
+    public CompleteMultipartUploadResult mergeChunks(String bucketName, String objectKey, String uploadId, List<PartETag> partETags) {
+        return null;
+    }
 }

src/main/java/org/ycloud/aipan/component/MinIOFileStoreEngine.java

@@ -1,19 +1,19 @@
 package org.ycloud.aipan.component;
+import com.amazonaws.HttpMethod;
 import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.Bucket;
-import com.amazonaws.services.s3.model.ObjectMetadata;
-import com.amazonaws.services.s3.model.S3Object;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
+import com.amazonaws.services.s3.model.*;
 import jakarta.annotation.Resource;
 import jakarta.servlet.http.HttpServletResponse;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.tomcat.util.http.fileupload.IOUtils;
+import org.springframework.context.annotation.Primary;
 import org.springframework.stereotype.Component;
 import org.springframework.web.multipart.MultipartFile;
 import java.io.File;
 import java.io.IOException;
+import java.net.URL;
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
@@ -22,6 +22,7 @@ import java.util.concurrent.TimeUnit;
 @Slf4j
 @Component
+@Primary
 public class MinIOFileStoreEngine implements StoreEngine {
     @Resource
@@ -223,4 +224,54 @@ public class MinIOFileStoreEngine implements StoreEngine {
             log.error("download failed, bucket {}, object {}: {}", bucketName, objectKey, e.getMessage(), e);
         }
     }
+
+    @Override
+    public PartListing listMultipart(String bucketName, String objectKey, String uploadId) {
+        try {
+            ListPartsRequest request = new ListPartsRequest(bucketName, objectKey, uploadId);
+            return amazonS3Client.listParts(request);
+        } catch (Exception e) {
+            log.error("errorMsg={}", e);
+            return null;
+        }
+    }
+
+    @Override
+    public InitiateMultipartUploadResult initMultipartUploadTask(String bucketName, String objectKey, ObjectMetadata metadata) {
+        try {
+            InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, objectKey, metadata);
+            return amazonS3Client.initiateMultipartUpload(request);
+        } catch (Exception e) {
+            log.error("errorMsg={}", e);
+            return null;
+        }
+    }
+
+    @Override
+    public URL genePreSignedUrl(String bucketName, String objectKey, HttpMethod httpMethod, Date expiration, Map<String, Object> params) {
+        try {
+            GeneratePresignedUrlRequest genePreSignedUrlReq =
+                    new GeneratePresignedUrlRequest(bucketName, objectKey, httpMethod)
+                            .withExpiration(expiration);
+            // add every entry in params as a request parameter, e.g. the upload ID and part number:
+            // genePreSignedUrlReq.addRequestParameter("uploadId", uploadId);
+            // genePreSignedUrlReq.addRequestParameter("partNumber", String.valueOf(i));
+            for (Map.Entry<String, Object> entry : params.entrySet()) {
+                genePreSignedUrlReq.addRequestParameter(entry.getKey(), String.valueOf(entry.getValue()));
+            }
+            // generate and return the presigned URL
+            return amazonS3Client.generatePresignedUrl(genePreSignedUrlReq);
+        } catch (Exception e) {
+            log.error("errorMsg={}", e);
+            return null;
+        }
+    }
+
+    @Override
+    public CompleteMultipartUploadResult mergeChunks(String bucketName, String objectKey, String uploadId, List<PartETag> partETags) {
+        CompleteMultipartUploadRequest request = new CompleteMultipartUploadRequest(bucketName, objectKey, uploadId, partETags);
+        return amazonS3Client.completeMultipartUpload(request);
+    }
 }

src/main/java/org/ycloud/aipan/component/OSSFileStoreEngine.java

@@ -1,5 +1,6 @@
 package org.ycloud.aipan.component;
+import com.amazonaws.HttpMethod;
 import com.amazonaws.services.s3.AmazonS3Client;
 import com.amazonaws.services.s3.model.*;
 import jakarta.annotation.Resource;
@@ -12,6 +13,7 @@ import org.springframework.web.multipart.MultipartFile;
 import java.io.File;
 import java.io.IOException;
+import java.net.URL;
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
@@ -20,7 +22,6 @@ import java.util.concurrent.TimeUnit;
 @Slf4j
 @Component
-@Primary
 public class OSSFileStoreEngine implements StoreEngine {
     @Resource
     private AmazonS3Client amazonS3Client;
@@ -148,6 +149,26 @@ public class OSSFileStoreEngine implements StoreEngine {
         }
     }
+
+    @Override
+    public PartListing listMultipart(String bucketName, String objectKey, String uploadId) {
+        return null;
+    }
+
+    @Override
+    public InitiateMultipartUploadResult initMultipartUploadTask(String bucketName, String objectKey, ObjectMetadata metadata) {
+        return null;
+    }
+
+    @Override
+    public URL genePreSignedUrl(String bucketName, String objectKey, HttpMethod httpMethod, Date expiration, Map<String, Object> params) {
+        return null;
+    }
+
+    @Override
+    public CompleteMultipartUploadResult mergeChunks(String bucketName, String objectKey, String uploadId, List<PartETag> partETags) {
+        return null;
+    }
     // path joining
     // public static void main(String[] args) {
     //     String fileSeparator = System.getProperty("file.separator");

src/main/java/org/ycloud/aipan/component/StoreEngine.java

@@ -1,11 +1,14 @@
 package org.ycloud.aipan.component;
-import com.amazonaws.services.s3.model.Bucket;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
+import com.amazonaws.HttpMethod;
+import com.amazonaws.services.s3.model.*;
 import jakarta.servlet.http.HttpServletResponse;
 import org.springframework.web.multipart.MultipartFile;
+import java.net.URL;
+import java.util.Date;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 public interface StoreEngine {
@@ -107,4 +110,46 @@ public interface StoreEngine {
      * @param response HTTP response object used to stream the downloaded object
      */
     void download2Response(String bucketName, String objectKey, HttpServletResponse response);
+
+    /*=================== multipart (chunked) upload =============================*/
+
+    /**
+     * Query the parts already uploaded
+     * @param bucketName bucket name
+     * @param objectKey  object key
+     * @param uploadId   multipart upload ID
+     * @return the part listing
+     */
+    PartListing listMultipart(String bucketName, String objectKey, String uploadId);
+
+    /**
+     * 1 - Initialize the multipart upload task and obtain the uploadId. If an uploadId already
+     * exists at init time this is a resumed upload, and a new uploadId must not be generated.
+     * @param bucketName bucket name
+     * @param objectKey  object key
+     * @param metadata   object metadata
+     * @return the init result, which contains the uploadId
+     */
+    InitiateMultipartUploadResult initMultipartUploadTask(String bucketName, String objectKey, ObjectMetadata metadata);
+
+    /**
+     * 2 - Generate the per-part presigned upload URL to return to the frontend
+     * @param bucketName bucket name
+     * @param objectKey  object key
+     * @param httpMethod HTTP method, e.g. GET or PUT
+     * @param expiration signature expiry time
+     * @param params     parameters baked into the signature
+     * @return the generated presigned URL
+     */
+    URL genePreSignedUrl(String bucketName, String objectKey, HttpMethod httpMethod, Date expiration, Map<String, Object> params);
+
+    /**
+     * 3 - Merge the parts
+     * @param bucketName bucket name
+     * @param objectKey  object key
+     * @param uploadId   multipart upload ID
+     * @param partETags  part ETag list, used to verify part integrity
+     * @return the complete-multipart-upload result
+     */
+    CompleteMultipartUploadResult mergeChunks(String bucketName, String objectKey, String uploadId, List<PartETag> partETags);
 }
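Read together, these four methods define a three-step protocol: init to obtain an uploadId, one presigned PUT URL per part, then merge. Below is a minimal sketch of how a caller might chain them, assuming it lives in the same package as StoreEngine; the names MultipartFlowSketch, engine, bucket, key, and partCount are illustrative and not part of this commit:

```java
import com.amazonaws.HttpMethod;
import com.amazonaws.services.s3.model.*;
import java.net.URL;
import java.util.*;
import java.util.stream.Collectors;

class MultipartFlowSketch {
    // Hypothetical driver; in the app, 'engine' resolves to the @Primary MinIOFileStoreEngine.
    static void upload(StoreEngine engine, String bucket, String key, int partCount) {
        // 1 - init: obtain the uploadId that ties all the parts together
        ObjectMetadata meta = new ObjectMetadata();
        meta.setContentType("application/octet-stream");
        String uploadId = engine.initMultipartUploadTask(bucket, key, meta).getUploadId();

        // 2 - one presigned PUT URL per part; the client PUTs the raw bytes to each URL
        Date expiry = new Date(System.currentTimeMillis() + 3600 * 1000); // 1 hour
        for (int part = 1; part <= partCount; part++) {
            Map<String, Object> params = new HashMap<>();
            params.put("uploadId", uploadId);
            params.put("partNumber", String.valueOf(part));
            URL url = engine.genePreSignedUrl(bucket, key, HttpMethod.PUT, expiry, params);
            // hand 'url' to the frontend for the actual upload
        }

        // 3 - merge: list what actually arrived, turn it into PartETags, and complete
        List<PartETag> etags = engine.listMultipart(bucket, key, uploadId).getParts().stream()
                .map(p -> new PartETag(p.getPartNumber(), p.getETag()))
                .collect(Collectors.toList());
        engine.mergeChunks(bucket, key, uploadId, etags);
    }
}
```

FileChunkServiceImpl below follows this exact order, persisting the expected part count in the file_chunk table so the merge step can verify that every part arrived.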

src/main/java/org/ycloud/aipan/config/InterceptorConfig.java

@@ -1,10 +1,12 @@
 package org.ycloud.aipan.config;
+import org.springframework.context.annotation.Bean;
+import org.springframework.web.servlet.config.annotation.CorsRegistry;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 import jakarta.annotation.Resource;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
-import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 import org.ycloud.aipan.interceptor.LoginInterceptor;
@@ -26,4 +28,19 @@ public class InterceptorConfig implements WebMvcConfigurer {
                 "/api/share/*/check_share_code", "/api/share/*/visit", "/api/share/*/detail_no_code", "/api/share/*/detail_with_code");
     }
+
+    @Bean
+    public WebMvcConfigurer corsConfigurer() {
+        return new WebMvcConfigurer() {
+            @Override
+            public void addCorsMappings(CorsRegistry registry) {
+                registry.addMapping("/api/**")
+                        .allowedOriginPatterns("*") // or specific origins
+                        .allowedMethods("GET", "POST", "PUT", "OPTIONS")
+                        .allowedHeaders("*")
+                        .allowCredentials(true);
+            }
+        };
+    }
 }

src/main/java/org/ycloud/aipan/controller/FileController.java

@@ -7,9 +7,11 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.*;
 import org.ycloud.aipan.controller.req.*;
 import org.ycloud.aipan.dto.AccountFileDTO;
+import org.ycloud.aipan.dto.FileChunkDTO;
 import org.ycloud.aipan.dto.FolderTreeNodeDTO;
 import org.ycloud.aipan.interceptor.LoginInterceptor;
 import org.ycloud.aipan.service.AccountFileService;
+import org.ycloud.aipan.service.FileChunkService;
 import org.ycloud.aipan.util.JsonData;
 import java.util.List;
@@ -22,6 +24,9 @@ public class FileController {
     @Autowired
     private AccountFileService accountFileService;
+    @Autowired
+    private FileChunkService fileChunkService;
+
     /**
      * Query the file list
      */
@@ -124,4 +129,50 @@ public class FileController {
         Boolean flag = accountFileService.secondUpload(req);
         return JsonData.buildSuccess(flag);
     }
+
+    // large-file upload; sharing, recycle bin, download, and search come later
+
+    /**
+     * 1 - Create the chunked-upload task
+     */
+    @PostMapping("init_file_chunk_task")
+    public JsonData initFileChunkTask(@RequestBody FileChunkInitTaskReq req) {
+        req.setAccountId(LoginInterceptor.threadLocal.get().getId());
+        FileChunkDTO fileChunkDTO = fileChunkService.initFileChunkTask(req);
+        return JsonData.buildSuccess(fileChunkDTO);
+    }
+
+    /**
+     * 2 - Get the per-part upload URL (a temporary presigned MinIO URL)
+     */
+    @GetMapping("/get_file_chunk_upload_url/{identifier}/{partNumber}")
+    public JsonData getFileChunkUploadUrl(@PathVariable("identifier") String identifier, @PathVariable("partNumber") int partNumber) {
+        Long accountId = LoginInterceptor.threadLocal.get().getId();
+        String url = fileChunkService.genPreSignUploadUrl(accountId, identifier, partNumber);
+        return JsonData.buildSuccess(url);
+    }
+
+    /**
+     * 3 - Merge the parts
+     */
+    @PostMapping("merge_file_chunk")
+    public JsonData mergeFileChunk(@RequestBody FileChunkMergeReq req) {
+        req.setAccountId(LoginInterceptor.threadLocal.get().getId());
+        fileChunkService.mergeFileChunk(req);
+        return JsonData.buildSuccess();
+    }
+
+    /**
+     * Query chunk-upload progress
+     */
+    @GetMapping("/chunk_upload_progress/{identifier}")
+    public JsonData getUploadProgress(@PathVariable("identifier") String identifier) {
+        Long accountId = LoginInterceptor.threadLocal.get().getId();
+        FileChunkDTO fileChunkDTO = fileChunkService.listFileChunk(accountId, identifier);
+        return JsonData.buildSuccess(fileChunkDTO);
+    }
 }
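Taken together, the endpoints define the client protocol that fileupload.html implements: check progress, init the task, PUT each missing part to its presigned URL, then merge. A rough sketch of the raw HTTP sequence using Java's built-in HttpClient; the base URL and the JsonData envelope ({code, msg, data}) come from this commit, the literal identifier and sizes are borrowed from FileChunkUploadTests, and JSON parsing of the responses is elided:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ChunkUploadClientSketch {
    // dev base URL as used by fileupload.html
    static final String BASE = "http://localhost:8081/api/file/v1";
    static final HttpClient http = HttpClient.newHttpClient();

    public static void main(String[] args) throws Exception {
        String identifier = "abcsdfsd"; // the file's MD5 in the real flow

        // 0 - ask for progress; exitPartList tells a resuming client which parts to skip
        send(HttpRequest.newBuilder(URI.create(BASE + "/chunk_upload_progress/" + identifier)).GET().build());

        // 1 - init the chunked-upload task (sizes taken from FileChunkUploadTests)
        String initJson = "{\"filename\":\"es_note.pdf\",\"identifier\":\"" + identifier
                + "\",\"totalSize\":20552959,\"chunkSize\":5242880}";
        send(HttpRequest.newBuilder(URI.create(BASE + "/init_file_chunk_task"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(initJson)).build());

        // 2 - per part: fetch the presigned URL (in 'data' of the JsonData envelope),
        //     then PUT the chunk bytes straight to MinIO, bypassing the application server
        String urlEnvelope = send(HttpRequest.newBuilder(
                URI.create(BASE + "/get_file_chunk_upload_url/" + identifier + "/1")).GET().build());
        // real code parses 'data' out of urlEnvelope and PUTs the chunk bytes to that URL

        // 3 - merge once every part has been uploaded
        send(HttpRequest.newBuilder(URI.create(BASE + "/merge_file_chunk"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(
                        "{\"identifier\":\"" + identifier + "\",\"parentId\":0}")).build());
    }

    static String send(HttpRequest req) throws Exception {
        return http.send(req, HttpResponse.BodyHandlers.ofString()).body();
    }
}
```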

src/main/java/org/ycloud/aipan/controller/req/FileChunkInitTaskReq.java

@@ -0,0 +1,26 @@ (new file)
package org.ycloud.aipan.controller.req;

import lombok.Data;
import lombok.experimental.Accessors;

@Data
@Accessors(chain = true)
public class FileChunkInitTaskReq {

    private Long accountId;
    private String filename;
    private String identifier;

    /**
     * total file size
     */
    private Long totalSize;

    /**
     * chunk size
     */
    private Long chunkSize;
}

src/main/java/org/ycloud/aipan/controller/req/FileChunkMergeReq.java

@@ -0,0 +1,17 @@ (new file)
package org.ycloud.aipan.controller.req;

import lombok.Data;
import lombok.experimental.Accessors;

@Data
@Accessors(chain = true)
public class FileChunkMergeReq {

    private String identifier;
    private Long parentId;
    private Long accountId;
}

src/main/java/org/ycloud/aipan/dto/FileChunkDTO.java

@@ -0,0 +1,66 @@ (new file)
package org.ycloud.aipan.dto;

import com.amazonaws.services.s3.model.PartSummary;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import org.ycloud.aipan.model.FileChunkDO;
import org.ycloud.aipan.util.SpringBeanUtil;

import java.util.List;

@Data
@NoArgsConstructor
@Accessors(chain = true)
public class FileChunkDTO {

    public FileChunkDTO(FileChunkDO fileChunkDO) {
        SpringBeanUtil.copyProperties(fileChunkDO, this);
    }

    private Long id;

    @Schema(description = "unique file identifier (MD5)")
    private String identifier;

    @Schema(description = "multipart upload ID")
    private String uploadId;

    @Schema(description = "file name")
    private String fileName;

    @Schema(description = "bucket name")
    private String bucketName;

    @Schema(description = "object key of the file")
    private String objectKey;

    @Schema(description = "total file size (bytes)")
    private Long totalSize;

    @Schema(description = "size of each chunk (bytes)")
    private Long chunkSize;

    @Schema(description = "number of chunks")
    private Integer chunkNum;

    @Schema(description = "account ID")
    private Long accountId;

    /**
     * whether the upload has finished
     */
    private boolean finished;

    /**
     * the parts that already exist
     */
    private List<PartSummary> exitPartList;
}

src/main/java/org/ycloud/aipan/interceptor/LoginInterceptor.java

@@ -32,7 +32,8 @@ public class LoginInterceptor implements HandlerInterceptor {
             return true;
         }
-        String token = request.getHeader("token");
+        // String token = request.getHeader("token");
+        String token = "YUANeyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJZVUFOIiwiYWNjb3VudElkIjoxODkwNDI0MTU2NjgwMzgwNDE4LCJ1c2VybmFtZSI6Inl1YW4iLCJpYXQiOjE3NTM1MzM1MTgsImV4cCI6MTc1NDEzODMxOH0.WeyTmVvbHqwaBIYLm0tiK7nOJWRQ4Q-jkLilJW0hR8U";
         if (StringUtils.isBlank(token)) {
             token = request.getParameter("token");
         }

src/main/java/org/ycloud/aipan/model/FileChunkDO.java

@@ -7,8 +7,10 @@ import com.baomidou.mybatisplus.annotation.TableName;
 import java.io.Serializable;
 import java.util.Date;
 import io.swagger.v3.oas.annotations.media.Schema;
+import lombok.Builder;
 import lombok.Getter;
 import lombok.Setter;
+import lombok.experimental.Accessors;
 /**
  * <p>
@@ -22,6 +24,7 @@ import lombok.Setter;
 @Setter
 @TableName("file_chunk")
 @Schema(name = "FileChunkDO", description = "file chunk info table")
+@Accessors(chain = true)
 public class FileChunkDO implements Serializable {
     private static final long serialVersionUID = 1L;

src/main/java/org/ycloud/aipan/service/AccountFileService.java

@@ -72,4 +72,7 @@ public interface AccountFileService {
      * step 3: create the association
      */
     Boolean secondUpload(FileSecondUploadReq req);
+
+    void saveFileAndAccountFile(FileUploadReq req, String storeFileObjectKey);
 }

src/main/java/org/ycloud/aipan/service/FileChunkService.java

@@ -0,0 +1,28 @@ (new file)
package org.ycloud.aipan.service;

import org.ycloud.aipan.controller.req.FileChunkInitTaskReq;
import org.ycloud.aipan.controller.req.FileChunkMergeReq;
import org.ycloud.aipan.dto.FileChunkDTO;

public interface FileChunkService {

    /**
     * Initialize the chunked upload
     */
    FileChunkDTO initFileChunkTask(FileChunkInitTaskReq req);

    /**
     * Get the temporary per-part upload URL
     */
    String genPreSignUploadUrl(Long accountId, String identifier, Integer partNumber);

    /**
     * Merge the parts
     */
    void mergeFileChunk(FileChunkMergeReq req);

    /**
     * Query chunk-upload progress
     */
    FileChunkDTO listFileChunk(Long accountId, String identifier);
}

src/main/java/org/ycloud/aipan/service/impl/AccountFileServiceImpl.java

@@ -383,6 +383,7 @@ public class AccountFileServiceImpl implements AccountFileService {
      * @param req
      * @param storeFileObjectKey
      */
+    @Override
     public void saveFileAndAccountFile(FileUploadReq req, String storeFileObjectKey) {
         // save the file
         FileDO fileDO = saveFile(req, storeFileObjectKey);

src/main/java/org/ycloud/aipan/service/impl/FileChunkServiceImpl.java

@@ -0,0 +1,172 @@ (new file)
package org.ycloud.aipan.service.impl;

import cn.hutool.core.date.DateUtil;
import com.amazonaws.HttpMethod;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.MediaTypeFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.ycloud.aipan.component.StoreEngine;
import org.ycloud.aipan.config.MinioConfig;
import org.ycloud.aipan.controller.req.FileChunkInitTaskReq;
import org.ycloud.aipan.controller.req.FileChunkMergeReq;
import org.ycloud.aipan.controller.req.FileUploadReq;
import org.ycloud.aipan.dto.FileChunkDTO;
import org.ycloud.aipan.enums.BizCodeEnum;
import org.ycloud.aipan.exception.BizException;
import org.ycloud.aipan.mapper.FileChunkMapper;
import org.ycloud.aipan.mapper.StorageMapper;
import org.ycloud.aipan.model.FileChunkDO;
import org.ycloud.aipan.model.StorageDO;
import org.ycloud.aipan.service.AccountFileService;
import org.ycloud.aipan.service.FileChunkService;
import org.ycloud.aipan.util.CommonUtil;
import com.amazonaws.services.s3.model.*;

import java.net.URL;
import java.util.*;
import java.util.stream.Collectors;

@Service
@Slf4j
public class FileChunkServiceImpl implements FileChunkService {

    @Autowired
    private StorageMapper storageMapper;
    @Autowired
    private StoreEngine fileStoreEngine;
    @Autowired
    private MinioConfig minioConfig;
    @Autowired
    private FileChunkMapper fileChunkMapper;
    @Autowired
    private AccountFileService accountFileService;

    @Override
    @Transactional(rollbackFor = Exception.class)
    public FileChunkDTO initFileChunkTask(FileChunkInitTaskReq req) {
        // check whether there is enough storage quota
        StorageDO storageDO = storageMapper.selectOne(new QueryWrapper<StorageDO>().eq("account_id", req.getAccountId()));
        if (storageDO.getUsedSize() + req.getTotalSize() > storageDO.getTotalSize()) {
            throw new BizException(BizCodeEnum.FILE_STORAGE_NOT_ENOUGH);
        }
        String objectKey = CommonUtil.getFilePath(req.getFilename());
        // infer the content type from the file name
        String contentType = MediaTypeFactory.getMediaType(objectKey).orElse(MediaType.APPLICATION_OCTET_STREAM).toString();
        // set the object metadata
        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentType(contentType);
        // initialize the multipart upload and obtain the uploadId
        String uploadId = fileStoreEngine.initMultipartUploadTask(minioConfig.getBucketName(), objectKey, objectMetadata).getUploadId();
        // create the upload-task entity and fill in its fields
        FileChunkDO task = new FileChunkDO();
        int chunkNum = (int) Math.ceil(req.getTotalSize() * 1.0 / req.getChunkSize());
        task.setBucketName(minioConfig.getBucketName())
                .setChunkNum(chunkNum)
                .setChunkSize(req.getChunkSize())
                .setTotalSize(req.getTotalSize())
                .setIdentifier(req.getIdentifier())
                .setFileName(req.getFilename())
                .setObjectKey(objectKey)
                .setUploadId(uploadId)
                .setAccountId(req.getAccountId());
        // insert the task into the database
        fileChunkMapper.insert(task);
        // build and return the task DTO
        return new FileChunkDTO(task).setFinished(false).setExitPartList(new ArrayList<>());
    }

    @Override
    public String genPreSignUploadUrl(Long accountId, String identifier, Integer partNumber) {
        FileChunkDO task = fileChunkMapper.selectOne(new QueryWrapper<FileChunkDO>().lambda().eq(FileChunkDO::getIdentifier, identifier).eq(FileChunkDO::getAccountId, accountId));
        if (task == null) {
            throw new BizException(BizCodeEnum.FILE_CHUNK_TASK_NOT_EXISTS);
        }
        // configure the presigned-URL expiry
        Date expireDate = DateUtil.offsetMillisecond(new Date(), minioConfig.getPRE_SIGN_URL_EXPIRE().intValue());
        // generate the presigned URL
        Map<String, Object> params = new HashMap<>();
        params.put("partNumber", partNumber.toString());
        params.put("uploadId", task.getUploadId());
        URL preSignedUrl = fileStoreEngine.genePreSignedUrl(minioConfig.getBucketName(), task.getObjectKey(), HttpMethod.PUT, expireDate, params);
        log.info("generated presigned URL identifier={}, partNumber={}, preSignedUrl={}", identifier, partNumber, preSignedUrl.toString());
        return preSignedUrl.toString();
    }

    @Override
    public void mergeFileChunk(FileChunkMergeReq req) {
        // load the task and the part list, and check there are enough parts to merge
        FileChunkDO task = fileChunkMapper.selectOne(new QueryWrapper<FileChunkDO>()
                .eq("account_id", req.getAccountId())
                .eq("identifier", req.getIdentifier()));
        if (task == null) {
            throw new BizException(BizCodeEnum.FILE_CHUNK_TASK_NOT_EXISTS);
        }
        PartListing partListing = fileStoreEngine.listMultipart(task.getBucketName(), task.getObjectKey(), task.getUploadId());
        List<PartSummary> parts = partListing.getParts();
        if (parts.size() != task.getChunkNum()) {
            // the number of uploaded parts does not match the record; the merge fails
            throw new BizException(BizCodeEnum.FILE_CHUNK_NOT_ENOUGH);
        }
        // check and update the storage quota
        StorageDO storageDO = storageMapper.selectOne(new QueryWrapper<>(new StorageDO())
                .eq("account_id", req.getAccountId()));
        long realFileTotalSize = parts.stream().map(PartSummary::getSize).mapToLong(Long::valueOf).sum();
        if (storageDO.getUsedSize() + realFileTotalSize > storageDO.getTotalSize()) {
            throw new BizException(BizCodeEnum.FILE_STORAGE_NOT_ENOUGH);
        }
        storageDO.setUsedSize(storageDO.getUsedSize() + realFileTotalSize);
        storageMapper.updateById(storageDO);
        // 2 - merge the parts into the final object
        CompleteMultipartUploadResult result = fileStoreEngine.mergeChunks(task.getBucketName(),
                task.getObjectKey(), task.getUploadId(),
                parts.stream().map(partSummary ->
                                new PartETag(partSummary.getPartNumber(), partSummary.getETag()))
                        .collect(Collectors.toList()));
        // check whether the merge succeeded
        if (result.getETag() != null) {
            FileUploadReq fileUploadReq = new FileUploadReq();
            fileUploadReq.setAccountId(req.getAccountId())
                    .setFilename(task.getFileName())
                    .setIdentifier(task.getIdentifier())
                    .setParentId(req.getParentId())
                    .setFileSize(realFileTotalSize)
                    .setFile(null);
            // persist the file and its association records
            accountFileService.saveFileAndAccountFile(fileUploadReq, task.getObjectKey());
            // delete the task record
            fileChunkMapper.deleteById(task.getId());
            log.info("merge succeeded");
        }
    }

    @Override
    public FileChunkDTO listFileChunk(Long accountId, String identifier) {
        // load the task and part list
        FileChunkDO task = fileChunkMapper.selectOne(new QueryWrapper<FileChunkDO>().lambda().eq(FileChunkDO::getAccountId, accountId));
        if (task == null || !identifier.equals(task.getIdentifier())) {
            return null;
        }
        FileChunkDTO result = new FileChunkDTO(task);
        boolean doesObjectExist = fileStoreEngine.doesObjectExist(task.getBucketName(), task.getObjectKey());
        if (!doesObjectExist) {
            // the merged object does not exist yet, so the upload is unfinished; return the uploaded parts
            PartListing partListing = fileStoreEngine.listMultipart(task.getBucketName(), task.getObjectKey(), task.getUploadId());
            if (task.getChunkNum() == partListing.getParts().size()) {
                // all parts are present; ready to merge
                result.setFinished(true).setExitPartList(partListing.getParts());
            } else {
                result.setFinished(false).setExitPartList(partListing.getParts());
            }
        }
        return result;
    }
}

src/main/resources/application.yml

@@ -58,4 +58,3 @@ oss:
   access-key: LTAI5tRQFFPQWHPZksM9XGHG
   access-secret: z4ZSJffdH525Konxz7LBxOSAZP2BXN
   bucket-name: forward-tech

src/main/resources/static/fileupload.html

@@ -0,0 +1,425 @@ (new file)
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8"/>
<title>Large File Upload (resumable)</title>
<style>
/* original styles unchanged; only .hidden added to control hiding */
* {
margin: 0;
padding: 0;
box-sizing: border-box
}
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
display: flex;
min-height: 100vh;
align-items: center;
justify-content: center;
padding: 20px
}
.upload-container {
background: rgba(255, 255, 255, .95);
border-radius: 20px;
padding: 40px;
width: 100%;
max-width: 600px;
box-shadow: 0 20px 40px rgba(0, 0, 0, .1)
}
.upload-header {
text-align: center;
margin-bottom: 30px
}
.upload-header h1 {
font-size: 32px;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent
}
.upload-area {
border: 2px dashed #667eea;
border-radius: 15px;
padding: 60px 20px;
text-align: center;
cursor: pointer;
transition: all .3s;
position: relative
}
.upload-area:hover {
border-color: #764ba2;
background: rgba(118, 75, 162, .05)
}
.upload-area.dragover {
border-color: #764ba2;
background: rgba(118, 75, 162, .1);
transform: scale(1.02)
}
.upload-icon {
font-size: 48px;
margin-bottom: 15px;
animation: float 3s ease-in-out infinite
}
@keyframes float {
0%, 100% {
transform: translateY(0)
}
50% {
transform: translateY(-10px)
}
}
.file-input {
display: none
}
.file-info, .progress-container, .success-message, .error-message {
margin-top: 20px;
display: none
}
.file-info.active, .progress-container.active {
display: block;
animation: fadeIn .3s
}
@keyframes fadeIn {
from {
opacity: 0
}
to {
opacity: 1
}
}
.progress-bar-bg {
background: #e0e0e0;
border-radius: 10px;
height: 20px;
overflow: hidden;
position: relative
}
.progress-bar {
background: linear-gradient(90deg, #667eea 0%, #764ba2 100%);
height: 100%;
width: 0;
transition: width .3s;
position: relative;
overflow: hidden
}
.progress-bar::after {
content: '';
position: absolute;
top: 0;
left: 0;
bottom: 0;
right: 0;
background: linear-gradient(90deg, transparent, rgba(255, 255, 255, .3), transparent);
animation: shimmer 2s infinite
}
@keyframes shimmer {
0% {
transform: translateX(-100%)
}
100% {
transform: translateX(100%)
}
}
.progress-text {
text-align: center;
margin-top: 10px;
color: #666;
font-size: 14px
}
.upload-btn {
margin-top: 20px;
padding: 12px 40px;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
color: #fff;
border: none;
border-radius: 25px;
font-size: 16px;
cursor: pointer;
transition: all .3s;
display: none
}
.upload-btn:hover {
transform: translateY(-2px);
box-shadow: 0 5px 15px rgba(0, 0, 0, .2)
}
.upload-btn.active {
display: inline-block;
animation: bounceIn .5s
}
@keyframes bounceIn {
0% {
transform: scale(.8);
opacity: 0
}
50% {
transform: scale(1.1)
}
100% {
transform: scale(1);
opacity: 1
}
}
.hidden {
display: none
}
.loading-spinner {
display: none;
width: 40px;
height: 40px;
margin: 20px auto;
border: 4px solid #f3f3f3;
border-top: 4px solid #667eea;
border-radius: 50%;
animation: spin 1s linear infinite
}
@keyframes spin {
0% {
transform: rotate(0deg)
}
100% {
transform: rotate(360deg)
}
}
</style>
</head>
<body>
<div class="upload-container">
<div class="upload-header">
<h1>大文件上传(断点续传)</h1>
<p>支持秒传、断点续传,最大支持2GB</p>
</div>
<!-- 拖拽区域 -->
<div class="upload-area" id="uploadArea">
<div class="upload-icon">📁</div>
<p>点击或拖拽文件到此处上传</p>
<input type="file" id="fileInput" class="file-input"/>
</div>
<!-- 文件信息 -->
<div class="file-info" id="fileInfo">
<div class="file-name" id="fileName"></div>
<div class="file-size" id="fileSize"></div>
</div>
<!-- 进度 -->
<div class="progress-container" id="progressContainer">
<div class="progress-bar-bg">
<div class="progress-bar" id="progressBar"></div>
</div>
<div class="progress-text" id="progressText">0%</div>
<div class="chunk-info" id="chunkInfo"></div>
</div>
<!-- 操作按钮 -->
<button class="upload-btn" id="uploadBtn">开始上传</button>
<div class="loading-spinner" id="loadingSpinner"></div>
<!-- 结果提示 -->
<div class="success-message" id="successMessage">✅ 上传成功</div>
<div class="error-message" id="errorMessage">❌ 上传失败</div>
</div>
<!-- SparkMD5 计算 MD5 -->
<script src="https://cdn.jsdelivr.net/npm/spark-md5@3.0.2/spark-md5.min.js"></script>
<script>
const uploadArea = document.getElementById('uploadArea');
const fileInput = document.getElementById('fileInput');
const fileName = document.getElementById('fileName');
const fileSize = document.getElementById('fileSize');
const progressBar = document.getElementById('progressBar');
const progressText = document.getElementById('progressText');
const chunkInfo = document.getElementById('chunkInfo');
const uploadBtn = document.getElementById('uploadBtn');
const loadingSpinner = document.getElementById('loadingSpinner');
const successBox = document.getElementById('successMessage');
const errorBox = document.getElementById('errorMessage');
const fileInfoBox = document.getElementById('fileInfo');
const progressBox = document.getElementById('progressContainer');
let file = null;
let identifier = '';
const chunkSize = 5 * 1024 * 1024; // 5MB
let totalChunks = 0;
let uploadedParts = new Set(); // part numbers already uploaded
/* ---------- drag & select ---------- */
uploadArea.addEventListener('click', () => fileInput.click());
fileInput.addEventListener('change', e => handleFile(e.target.files[0]));
uploadArea.addEventListener('dragover', e => {
e.preventDefault();
uploadArea.classList.add('dragover');
});
uploadArea.addEventListener('dragleave', () => uploadArea.classList.remove('dragover'));
uploadArea.addEventListener('drop', e => {
e.preventDefault();
uploadArea.classList.remove('dragover');
handleFile(e.dataTransfer.files[0]);
});
function handleFile(f) {
if (!f) return;
file = f;
fileName.textContent = f.name;
fileSize.textContent = formatSize(f.size);
fileInfoBox.classList.add('active');
uploadBtn.classList.add('active');
}
function formatSize(b) {
const units = ['B', 'KB', 'MB', 'GB'];
let i = 0;
while (b >= 1024 && i < 3) {
b /= 1024;
i++;
}
return b.toFixed(2) + ' ' + units[i];
}
/* ---------- helpers ---------- */
async function md5File(f) {
return new Promise((resolve, reject) => {
const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
const chunks = Math.ceil(f.size / chunkSize);
let currentChunk = 0;
const spark = new SparkMD5.ArrayBuffer();
const reader = new FileReader();
reader.onload = e => {
spark.append(e.target.result);
currentChunk++;
if (currentChunk < chunks) loadNext();
else resolve(spark.end());
};
reader.onerror = () => reject('FileReader error');
function loadNext() {
const start = currentChunk * chunkSize;
const end = Math.min(start + chunkSize, f.size);
reader.readAsArrayBuffer(blobSlice.call(f, start, end));
}
loadNext();
});
}
/* ---------- main upload flow ---------- */
uploadBtn.addEventListener('click', startUpload);
async function startUpload() {
if (!file) return;
uploadBtn.style.display = 'none';
loadingSpinner.style.display = 'block';
progressBox.classList.add('active');
try {
identifier = await md5File(file);
totalChunks = Math.ceil(file.size / chunkSize);
// query the parts already uploaded
const progressResp = await fetch(`http://localhost:8081/api/file/v1/chunk_upload_progress/${identifier}`);
const progressData = await progressResp.json();
if (progressData.code === 0 && progressData.data != null && progressData.data.exitPartList) {
progressData.data.exitPartList.forEach(p => uploadedParts.add(p.partNumber));
}
// init the task (an already-uploaded file completes instantly)
const initResp = await fetch('http://localhost:8081/api/file/v1/init_file_chunk_task', {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({
filename: file.name,
identifier,
totalSize: file.size,
chunkSize
})
});
const initData = await initResp.json();
if (initData.code !== 0) throw new Error(initData.msg || 'init failed');
const uploadId = initData.data.uploadId;
// upload the missing parts
let uploaded = uploadedParts.size;
for (let i = 0; i < totalChunks; i++) {
const partNumber = i + 1;
if (uploadedParts.has(partNumber)) continue;
const start = i * chunkSize;
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
// fetch the presigned URL for this part
const urlResp = await fetch(`http://localhost:8081/api/file/v1/get_file_chunk_upload_url/${identifier}/${partNumber}`);
const urlData = await urlResp.json();
if (urlData.code !== 0) throw new Error(urlData.msg || 'failed to get the upload URL');
// PUT the chunk directly to storage
await fetch(urlData.data, {method: 'PUT', body: chunk});
uploaded++;
const percent = (uploaded / totalChunks) * 100;
progressBar.style.width = percent + '%';
progressText.textContent = Math.round(percent) + '%';
chunkInfo.textContent = `uploading part ${partNumber} / ${totalChunks}`;
}
// merge
const mergeResp = await fetch('http://localhost:8081/api/file/v1/merge_file_chunk', {
method: 'POST',
headers: {'Content-Type': 'application/json'},
body: JSON.stringify({identifier: identifier, parentId: 0})
});
const mergeData = await mergeResp.json();
if (mergeData.code !== 0) throw new Error(mergeData.msg || 'merge failed');
loadingSpinner.style.display = 'none';
successBox.style.display = 'block';
reset();
} catch (e) {
loadingSpinner.style.display = 'none';
errorBox.textContent = `❌ ${e.message || 'upload error'}`;
errorBox.style.display = 'block';
}
}
function reset() {
setTimeout(() => {
fileInfoBox.classList.remove('active');
progressBox.classList.remove('active');
uploadBtn.classList.remove('active');
uploadBtn.style.display = '';
[successBox, errorBox].forEach(b => b.style.display = 'none');
progressBar.style.width = '0%';
progressText.textContent = '0%';
file = null;
fileInput.value = '';
}, 3000);
}
</script>
</body>
</html>

src/main/resources/static/spark-md5.min.js

File diff suppressed because one or more lines are too long

src/test/java/org/ycloud/aipan/AmazonS3ClientTests.java

@@ -41,7 +41,7 @@ class AmazonS3ClientTests {
      */
     @Test
     public void testCreateBucket() {
-        String bucketName = "avatar";
+        String bucketName = "ai-pan";
         Bucket bucket = amazonS3Client.createBucket(bucketName);
         log.info("bucket:{}", bucket);
     }

src/test/java/org/ycloud/aipan/BigFileUploadTest.java

@@ -0,0 +1,150 @@ (new file)
package org.ycloud.aipan;

import com.amazonaws.HttpMethod;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.*;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;

import java.net.URL;
import java.util.*;
import java.util.stream.Collectors;

@SpringBootTest
@Slf4j
public class BigFileUploadTest {

    @Autowired
    private AmazonS3Client amazonS3Client;

    //===================== large-file upload APIs ===========================

    /**
     * Step 1: initialize the multipart upload task and obtain the uploadId.
     * If an uploadId already exists at init time this is a resumed upload,
     * and a new uploadId must not be generated.
     */
    @Test
    public void testInitiateMultipartUploadTask() {
        String bucketName = "ai-pan";
        String objectKey = "/meta/test5.txt";
        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentType("text/plain");
        // objectMetadata.setContentType("application/vnd.openxmlformats-officedocument.wordprocessingml.document");
        // build the init request
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, objectKey, objectMetadata);
        // initialize the multipart upload task
        InitiateMultipartUploadResult uploadResult = amazonS3Client.initiateMultipartUpload(initRequest);
        String uploadId = uploadResult.getUploadId();
        log.info("uploadId:{}", uploadId);
    }

    @Test
    public void testGenePreSignedUrls() {
        // object key
        String objectKey = "/meta/test5.txt";
        // bucket name
        String bucket = "ai-pan";
        // number of parts, 4 here
        int chunkCount = 4;
        String uploadId = "NzViMDY5NGUtM2IwOC00ZDhkLTk1ODMtM2EyYThhZGFmNmI3LjVhMDlmZDJkLWE2ODgtNDgwNS1hODQwLWZhMTEwNWI0NTUwZA";
        // list holding the per-part URLs
        List<String> partList = new ArrayList<>();
        // generate a presigned URL for each part
        for (int i = 1; i <= chunkCount; i++) {
            // set the expiry, e.g. 1 hour from now
            Date expiration = new Date(System.currentTimeMillis() + 3600 * 1000);
            // build the presigned-URL request with HTTP method PUT
            GeneratePresignedUrlRequest genePreSignedUrlReq = new GeneratePresignedUrlRequest(bucket, objectKey, HttpMethod.PUT).withExpiration(expiration);
            // add the upload ID and part number as request parameters
            genePreSignedUrlReq.addRequestParameter("uploadId", uploadId);
            genePreSignedUrlReq.addRequestParameter("partNumber", String.valueOf(i));
            // generate the presigned URL
            URL url = amazonS3Client.generatePresignedUrl(genePreSignedUrlReq);
            partList.add(url.toString());
            log.info("partList:{}", partList);
        }
    }

    // merge the uploaded parts
    @Test
    public void testMergeChunks() {
        String objectKey = "/meta/test5.txt";
        String bucket = "ai-pan";
        // expected number of parts, 4 here
        int chunkCount = 4;
        // upload ID identifying this multipart upload
        String uploadId = "NzViMDY5NGUtM2IwOC00ZDhkLTk1ODMtM2EyYThhZGFmNmI3LjVhMDlmZDJkLWE2ODgtNDgwNS1hODQwLWZhMTEwNWI0NTUwZA";
        // list the parts
        ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, objectKey, uploadId);
        PartListing partListing = amazonS3Client.listParts(listPartsRequest);
        List<PartSummary> parts = partListing.getParts();
        if (chunkCount != parts.size()) {
            // uploaded part count does not match the record; cannot merge
            throw new RuntimeException("parts missing, please re-upload");
        }
        // build the complete-multipart-upload request
        CompleteMultipartUploadRequest completeMultipartUploadRequest = new CompleteMultipartUploadRequest()
                .withUploadId(uploadId)
                .withKey(objectKey)
                .withBucketName(bucket)
                .withPartETags(parts.stream()
                        // wrap each part's number and ETag into a PartETag
                        .map(partSummary -> new PartETag(partSummary.getPartNumber(), partSummary.getETag()))
                        .collect(Collectors.toList()));
        // complete the multipart upload
        CompleteMultipartUploadResult result = amazonS3Client.completeMultipartUpload(completeMultipartUploadRequest);
        log.info("result:{}", result.getBucketName());
    }

    // list the parts of a multipart upload
    @Test
    public void testListParts() {
        String objectKey = "/meta/test5.txt";
        String bucket = "ai-pan";
        // upload ID identifying this multipart upload
        String uploadId = "ZjFkZjRhN2UtNzMzOS04NTUxLTgwOTEtNWViNzUwNmRmYTEzLmE4NTUyMmQyLTM1NjUtNGMwMS05ZTY2LWQ5MWQ4NDUyBmIyA";
        // check whether the merged object already exists in the bucket
        boolean doesObjectExist = amazonS3Client.doesObjectExist(bucket, objectKey);
        if (!doesObjectExist) {
            // not finished yet; return the parts uploaded so far
            ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, objectKey, uploadId);
            PartListing partListing = amazonS3Client.listParts(listPartsRequest);
            List<PartSummary> parts = partListing.getParts();
            // result map holding the upload status and part list
            Map<String, Object> result = new HashMap<>();
            result.put("finished", false);
            result.put("exitPartList", parts);
            // the frontend uses this to decide whether to call the merge endpoint
            log.info("result:{}", result);
            // print each part's details
            for (PartSummary partSummary : parts) {
                System.out.println("getPartNumber:" + partSummary.getPartNumber() + ",getETag=" + partSummary.getETag() + ",getSize= " + partSummary.getSize() + ",getLastModified=" + partSummary.getLastModified());
            }
            System.out.println(partListing.getBucketName());
        }
    }
}

src/test/java/org/ycloud/aipan/FileChunkUploadTests.java

@@ -0,0 +1,158 @@ (new file)
package org.ycloud.aipan;

import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.assertj.core.util.Lists;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.ycloud.aipan.controller.req.FileChunkInitTaskReq;
import org.ycloud.aipan.controller.req.FileChunkMergeReq;
import org.ycloud.aipan.dto.FileChunkDTO;
import org.ycloud.aipan.service.FileChunkService;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.http.HttpClient;
import java.net.http.HttpResponse;
import java.util.List;

@SpringBootTest
@Slf4j
class FileChunkUploadTests {

    @Autowired
    private FileChunkService fileChunkService;

    private Long accountId = 3L;
    private String identifier = "abcsdfsd";

    /**
     * backend paths and names of the chunk files
     */
    private final List<String> chunkFilePaths = Lists.newArrayList();

    /**
     * per-part upload URLs
     */
    private final List<String> chunkUploadUrls = Lists.newArrayList();

    /**
     * upload ID
     */
    private String uploadId;

    /**
     * chunk size: 5MB
     */
    private final long chunkSize = 5 * 1024 * 1024;

    /**
     * Split a file larger than 10MB into 5MB chunks
     */
    @Test
    public void testCreateChunkFiles() {
        // split the file into chunk files on disk
        String filePath = "/Users/xdclass/Desktop/chunk/es_note.pdf";
        File file = new File(filePath);
        long fileSize = file.length();
        //int chunkCount = (int) Math.ceil((double) fileSize / CHUNK_SIZE);
        int chunkCount = (int) Math.ceil(fileSize * 1.0 / chunkSize);
        log.info("number of chunks to create: {} chunks", chunkCount);
        try (FileInputStream fis = new FileInputStream(file)) {
            byte[] buffer = new byte[(int) chunkSize];
            for (int i = 0; i < chunkCount; i++) {
                String chunkFileName = filePath + ".part" + (i + 1);
                try (FileOutputStream fos = new FileOutputStream(chunkFileName)) {
                    int bytesRead = fis.read(buffer);
                    fos.write(buffer, 0, bytesRead);
                    log.info("created chunk file: {} ({} bytes)", chunkFileName, bytesRead);
                    chunkFilePaths.add(chunkFileName);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Step 1: create the chunked-upload task
     */
    private void testInitFileChunkTask() {
        FileChunkInitTaskReq req = new FileChunkInitTaskReq();
        req.setAccountId(accountId).setFilename("es_note.pdf")
                .setTotalSize((long) (20552959))  // 20552959
                .setChunkSize((long) (5 * 1024 * 1024))  // 5242880
                .setIdentifier(identifier);
        FileChunkDTO fileChunkDTO = fileChunkService.initFileChunkTask(req);
        log.info("chunked-upload init result: {}", fileChunkDTO);
        uploadId = fileChunkDTO.getUploadId();
        testGetFileChunkUploadUrl();
    }

    /**
     * Step 2: get the per-part upload URLs (temporary MinIO URLs; the frontend uploads straight to MinIO)
     */
    private void testGetFileChunkUploadUrl() {
        for (int i = 1; i <= chunkFilePaths.size(); i++) {
            String uploadUrl = fileChunkService.genPreSignUploadUrl(accountId, identifier, i);
            log.info("part upload URL: {}", uploadUrl);
            // store the 4 part URLs
            chunkUploadUrls.add(uploadUrl);
        }
        uploadChunk();
    }

    /**
     * Simulate the frontend uploading the parts directly
     */
    @SneakyThrows
    private void uploadChunk() {
        CloseableHttpClient httpClient = HttpClients.createDefault();
        for (int i = 0; i < chunkUploadUrls.size(); i++) {
            // PUT straight to MinIO
            String chunkUploadId = chunkUploadUrls.get(i);
            HttpPut httpPut = new HttpPut(chunkUploadId);
            httpPut.setHeader("Content-Type", "application/octet-stream");
            File chunkFile = new File(chunkFilePaths.get(i));
            FileEntity chunkFileEntity = new FileEntity(chunkFile);
            httpPut.setEntity(chunkFileEntity);
            CloseableHttpResponse chunkUploadResp = httpClient.execute(httpPut);
            httpPut.releaseConnection();
        }
    }

    /**
     * Test merging the parts
     */
    @Test
    public void testMergeFileChunk() {
        FileChunkMergeReq req = new FileChunkMergeReq();
        req.setAccountId(accountId).setIdentifier(identifier).setParentId(233L);
        fileChunkService.mergeFileChunk(req);
    }

    /**
     * Query chunk-upload progress
     */
    @Test
    public void testChunkUploadProgress() {
        FileChunkDTO fileChunkDTO = fileChunkService.listFileChunk(accountId, identifier);
        log.info("chunk-upload progress: {}", fileChunkDTO);
    }
}