22 changed files with 1295 additions and 38 deletions
@ -0,0 +1,26 @@ |
|||
package org.ycloud.aipan.controller.req; |
|||
|
|||
import lombok.Data; |
|||
import lombok.experimental.Accessors; |
|||
|
|||
@Data
@Accessors(chain = true)
public class FileChunkInitTaskReq {

    /**
     * Id of the account that owns the upload task.
     */
    private Long accountId;

    /**
     * Original name of the file being uploaded.
     */
    private String filename;

    /**
     * Unique file identifier (MD5 of the file content).
     */
    private String identifier;

    /**
     * Total file size in bytes.
     */
    private Long totalSize;

    /**
     * Size of each chunk in bytes.
     */
    private Long chunkSize;

}
@ -0,0 +1,17 @@ |
|||
package org.ycloud.aipan.controller.req; |
|||
|
|||
import lombok.Data; |
|||
import lombok.experimental.Accessors; |
|||
|
|||
|
|||
@Data
@Accessors(chain = true)
public class FileChunkMergeReq {

    /**
     * Unique file identifier (MD5) of the chunk-upload task to merge.
     */
    private String identifier;

    /**
     * Id of the folder the merged file is placed under.
     */
    private Long parentId;

    /**
     * Id of the account that owns the upload task.
     */
    private Long accountId;

}
@ -0,0 +1,66 @@ |
|||
package org.ycloud.aipan.dto; |
|||
|
|||
import com.amazonaws.services.s3.model.PartSummary; |
|||
import io.swagger.v3.oas.annotations.media.Schema; |
|||
import lombok.Data; |
|||
import lombok.NoArgsConstructor; |
|||
import lombok.experimental.Accessors; |
|||
import org.ycloud.aipan.model.FileChunkDO; |
|||
import org.ycloud.aipan.util.SpringBeanUtil; |
|||
|
|||
|
|||
import java.util.List; |
|||
|
|||
|
|||
@Data
@NoArgsConstructor
@Accessors(chain = true)
public class FileChunkDTO {

    /**
     * Builds the DTO by copying all matching properties from the
     * persistence object.
     */
    public FileChunkDTO(FileChunkDO fileChunkDO){
        SpringBeanUtil.copyProperties(fileChunkDO,this);
    }

    private Long id;

    @Schema(description = "文件唯一标识(md5)")
    private String identifier;

    @Schema(description = "分片上传ID")
    private String uploadId;

    @Schema(description = "文件名")
    private String fileName;

    @Schema(description = "所属桶名")
    private String bucketName;

    @Schema(description = "文件的key")
    private String objectKey;

    @Schema(description = "总文件大小(byte)")
    private Long totalSize;

    @Schema(description = "每个分片大小(byte)")
    private Long chunkSize;

    @Schema(description = "分片数量")
    private Integer chunkNum;

    @Schema(description = "用户ID")
    private Long accountId;

    /**
     * Whether every part of the upload is complete.
     */
    private boolean finished;

    /**
     * Parts that already exist in object storage (used by the client to
     * resume an interrupted upload).
     */
    private List<PartSummary> exitPartList;

}
@ -0,0 +1,28 @@ |
|||
package org.ycloud.aipan.service; |
|||
|
|||
|
|||
import org.ycloud.aipan.controller.req.FileChunkInitTaskReq; |
|||
import org.ycloud.aipan.controller.req.FileChunkMergeReq; |
|||
import org.ycloud.aipan.dto.FileChunkDTO; |
|||
|
|||
public interface FileChunkService {
    /**
     * Initializes a chunked (multipart) upload task.
     *
     * @param req account id, file name, MD5 identifier, total size and chunk size
     * @return task info including the upload id and any already-uploaded parts
     */
    FileChunkDTO initFileChunkTask(FileChunkInitTaskReq req);

    /**
     * Generates a temporary pre-signed URL that the client PUTs one chunk to.
     *
     * @param accountId  owner of the upload task
     * @param identifier file MD5 identifying the task
     * @param partNumber 1-based chunk number
     * @return the pre-signed upload URL
     */
    String genPreSignUploadUrl(Long accountId, String identifier, Integer partNumber);

    /**
     * Merges all uploaded chunks into the final object and records the file.
     *
     * @param req identifier, owner account id and target folder id
     */
    void mergeFileChunk(FileChunkMergeReq req);

    /**
     * Queries chunk-upload progress for one task.
     *
     * @return progress DTO with the parts already uploaded, or null when no
     *         matching task exists
     */
    FileChunkDTO listFileChunk(Long accountId, String identifier);
}
@ -0,0 +1,172 @@ |
|||
package org.ycloud.aipan.service.impl; |
|||
|
|||
import cn.hutool.core.date.DateUtil; |
|||
import com.amazonaws.HttpMethod; |
|||
import com.amazonaws.services.s3.model.ObjectMetadata; |
|||
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; |
|||
import lombok.extern.slf4j.Slf4j; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.http.MediaType; |
|||
import org.springframework.http.MediaTypeFactory; |
|||
import org.springframework.stereotype.Service; |
|||
import org.springframework.transaction.annotation.Transactional; |
|||
import org.ycloud.aipan.component.StoreEngine; |
|||
import org.ycloud.aipan.config.MinioConfig; |
|||
import org.ycloud.aipan.controller.req.FileChunkInitTaskReq; |
|||
import org.ycloud.aipan.controller.req.FileChunkMergeReq; |
|||
import org.ycloud.aipan.controller.req.FileUploadReq; |
|||
import org.ycloud.aipan.dto.FileChunkDTO; |
|||
import org.ycloud.aipan.enums.BizCodeEnum; |
|||
import org.ycloud.aipan.exception.BizException; |
|||
import org.ycloud.aipan.mapper.FileChunkMapper; |
|||
import org.ycloud.aipan.mapper.StorageMapper; |
|||
import org.ycloud.aipan.model.FileChunkDO; |
|||
import org.ycloud.aipan.model.StorageDO; |
|||
import org.ycloud.aipan.service.AccountFileService; |
|||
import org.ycloud.aipan.service.FileChunkService; |
|||
import org.ycloud.aipan.util.CommonUtil; |
|||
import com.amazonaws.services.s3.model.*; |
|||
import java.net.URL; |
|||
import java.util.*; |
|||
import java.util.stream.Collectors; |
|||
|
|||
@Service |
|||
@Slf4j |
|||
public class FileChunkServiceImpl implements FileChunkService { |
|||
@Autowired |
|||
private StorageMapper storageMapper; |
|||
@Autowired |
|||
private StoreEngine fileStoreEngine; |
|||
@Autowired |
|||
private MinioConfig minioConfig; |
|||
@Autowired |
|||
private FileChunkMapper fileChunkMapper; |
|||
@Autowired |
|||
private AccountFileService accountFileService; |
|||
|
|||
@Override |
|||
@Transactional(rollbackFor = Exception.class) |
|||
public FileChunkDTO initFileChunkTask(FileChunkInitTaskReq req) { |
|||
//检查存储空间是否够
|
|||
StorageDO storageDO = storageMapper.selectOne(new QueryWrapper<StorageDO>().eq("account_id", req.getAccountId())); |
|||
if (storageDO.getUsedSize() + req.getTotalSize() > storageDO.getTotalSize()) { |
|||
throw new BizException(BizCodeEnum.FILE_STORAGE_NOT_ENOUGH); |
|||
} |
|||
|
|||
String objectKey = CommonUtil.getFilePath(req.getFilename()); |
|||
// 根据文件名推断内容类型
|
|||
String contentType = MediaTypeFactory.getMediaType(objectKey).orElse(MediaType.APPLICATION_OCTET_STREAM).toString(); |
|||
// 设置文件元数据
|
|||
ObjectMetadata objectMetadata = new ObjectMetadata(); |
|||
objectMetadata.setContentType(contentType); |
|||
// 初始化分片上传,获取上传ID
|
|||
String uploadId = fileStoreEngine.initMultipartUploadTask(minioConfig.getBucketName(), objectKey, objectMetadata).getUploadId(); |
|||
|
|||
// 创建上传任务实体并设置相关属性
|
|||
FileChunkDO task = new FileChunkDO(); |
|||
int chunkNum = (int) Math.ceil(req.getTotalSize() * 1.0 / req.getChunkSize()); |
|||
task.setBucketName(minioConfig.getBucketName()) |
|||
.setChunkNum(chunkNum) |
|||
.setChunkSize(req.getChunkSize()) |
|||
.setTotalSize(req.getTotalSize()) |
|||
.setIdentifier(req.getIdentifier()) |
|||
.setFileName(req.getFilename()) |
|||
.setObjectKey(objectKey) |
|||
.setUploadId(uploadId) |
|||
.setAccountId(req.getAccountId()); |
|||
// 将任务插入数据库
|
|||
fileChunkMapper.insert(task); |
|||
// 构建并返回任务信息DTO
|
|||
return new FileChunkDTO(task).setFinished(false).setExitPartList(new ArrayList<>()); |
|||
} |
|||
|
|||
|
|||
@Override |
|||
public String genPreSignUploadUrl(Long accountId, String identifier, Integer partNumber) { |
|||
FileChunkDO task = fileChunkMapper.selectOne(new QueryWrapper<FileChunkDO>().lambda().eq(FileChunkDO::getIdentifier, identifier).eq(FileChunkDO::getAccountId, accountId)); |
|||
if (task == null) { |
|||
throw new BizException(BizCodeEnum.FILE_CHUNK_TASK_NOT_EXISTS); |
|||
} |
|||
//配置预签名过期时间
|
|||
Date expireDate = DateUtil.offsetMillisecond(new Date(), minioConfig.getPRE_SIGN_URL_EXPIRE().intValue()); |
|||
// 生成预签名URL
|
|||
Map<String, Object> params = new HashMap<>(); |
|||
params.put("partNumber", partNumber.toString()); |
|||
params.put("uploadId", task.getUploadId()); |
|||
URL preSignedUrl = fileStoreEngine.genePreSignedUrl(minioConfig.getBucketName(), task.getObjectKey(), HttpMethod.PUT, expireDate, params); |
|||
log.info("生成预签名URL地址 identifier={},partNumber={}, preSignedUrl={}", identifier, partNumber, preSignedUrl.toString()); |
|||
return preSignedUrl.toString(); |
|||
} |
|||
|
|||
|
|||
public void mergeFileChunk(FileChunkMergeReq req) { |
|||
//获取任务和分片列表,检查是否足够合并
|
|||
FileChunkDO task = fileChunkMapper.selectOne(new QueryWrapper<FileChunkDO>() |
|||
.eq("account_id", req.getAccountId()) |
|||
.eq("identifier", req.getIdentifier())); |
|||
if(task == null){ |
|||
throw new BizException(BizCodeEnum.FILE_CHUNK_TASK_NOT_EXISTS); |
|||
} |
|||
PartListing partListing = fileStoreEngine.listMultipart(task.getBucketName(), task.getObjectKey(), task.getUploadId()); |
|||
List<PartSummary> parts = partListing.getParts(); |
|||
if(parts.size() != task.getChunkNum()){ |
|||
//上传的分片数量和记录中不对应,合并失败
|
|||
throw new BizException(BizCodeEnum.FILE_CHUNK_NOT_ENOUGH); |
|||
} |
|||
//检查更新存储空间
|
|||
StorageDO storageDO = storageMapper.selectOne(new QueryWrapper<>(new StorageDO()) |
|||
.eq("account_id", req.getAccountId())); |
|||
long realFileTotalSize = parts.stream().map(PartSummary::getSize).mapToLong(Long::valueOf).sum(); |
|||
if(storageDO.getUsedSize() + realFileTotalSize > storageDO.getTotalSize()){ |
|||
throw new BizException(BizCodeEnum.FILE_STORAGE_NOT_ENOUGH); |
|||
} |
|||
storageDO.setUsedSize(storageDO.getUsedSize() +realFileTotalSize); |
|||
storageMapper.updateById(storageDO); |
|||
|
|||
//2-合并文件
|
|||
CompleteMultipartUploadResult result = fileStoreEngine.mergeChunks(task.getBucketName(), |
|||
task.getObjectKey(), task.getUploadId(), |
|||
parts.stream().map(partSummary -> |
|||
new PartETag(partSummary.getPartNumber(), partSummary.getETag())) |
|||
.collect(Collectors.toList())); |
|||
//【判断是否合并成功
|
|||
if(result.getETag()!=null){ |
|||
FileUploadReq fileUploadReq = new FileUploadReq(); |
|||
fileUploadReq.setAccountId(req.getAccountId()) |
|||
.setFilename(task.getFileName()) |
|||
.setIdentifier(task.getIdentifier()) |
|||
.setParentId(req.getParentId()) |
|||
.setFileSize(realFileTotalSize) |
|||
.setFile(null); |
|||
|
|||
//存储文件和关联信息到数据库
|
|||
accountFileService.saveFileAndAccountFile(fileUploadReq,task.getObjectKey()); |
|||
//删除相关任务记录
|
|||
fileChunkMapper.deleteById(task.getId()); |
|||
log.info("合并成功"); |
|||
} |
|||
} |
|||
|
|||
@Override |
|||
public FileChunkDTO listFileChunk(Long accountId, String identifier) { |
|||
// 获取任务和分片列表,检查是否足够
|
|||
FileChunkDO task = fileChunkMapper.selectOne(new QueryWrapper<FileChunkDO>().lambda().eq(FileChunkDO::getAccountId, accountId)); |
|||
if (task == null || !identifier.equals(task.getIdentifier())) { |
|||
return null; |
|||
} |
|||
FileChunkDTO result = new FileChunkDTO(task); |
|||
boolean doesObjectExist = fileStoreEngine.doesObjectExist(task.getBucketName(), task.getObjectKey()); |
|||
if (!doesObjectExist) { |
|||
// 不存在,表示未上传完,返回已上传的分片
|
|||
PartListing partListing = fileStoreEngine.listMultipart(task.getBucketName(), task.getObjectKey(), task.getUploadId()); |
|||
if(task.getChunkNum() == partListing.getParts().size()){ |
|||
//已经存在,合并
|
|||
result.setFinished(true).setExitPartList(partListing.getParts()); |
|||
}else { |
|||
result.setFinished(false).setExitPartList(partListing.getParts()); |
|||
} |
|||
} |
|||
return result; |
|||
} |
|||
|
|||
} |
@ -0,0 +1,425 @@ |
|||
<!DOCTYPE html> |
|||
<html lang="zh-CN"> |
|||
<head> |
|||
<meta charset="UTF-8"/> |
|||
<title>大文件上传(断点续传版)</title> |
|||
<style> |
|||
/* 原有样式不变,只新增 .hidden 控制隐藏 */ |
|||
* { |
|||
margin: 0; |
|||
padding: 0; |
|||
box-sizing: border-box |
|||
} |
|||
|
|||
body { |
|||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif; |
|||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); |
|||
display: flex; |
|||
min-height: 100vh; |
|||
align-items: center; |
|||
justify-content: center; |
|||
padding: 20px |
|||
} |
|||
|
|||
.upload-container { |
|||
background: rgba(255, 255, 255, .95); |
|||
border-radius: 20px; |
|||
padding: 40px; |
|||
width: 100%; |
|||
max-width: 600px; |
|||
box-shadow: 0 20px 40px rgba(0, 0, 0, .1) |
|||
} |
|||
|
|||
.upload-header { |
|||
text-align: center; |
|||
margin-bottom: 30px |
|||
} |
|||
|
|||
.upload-header h1 { |
|||
font-size: 32px; |
|||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); |
|||
-webkit-background-clip: text; |
|||
-webkit-text-fill-color: transparent |
|||
} |
|||
|
|||
.upload-area { |
|||
border: 2px dashed #667eea; |
|||
border-radius: 15px; |
|||
padding: 60px 20px; |
|||
text-align: center; |
|||
cursor: pointer; |
|||
transition: all .3s; |
|||
position: relative |
|||
} |
|||
|
|||
.upload-area:hover { |
|||
border-color: #764ba2; |
|||
background: rgba(118, 75, 162, .05) |
|||
} |
|||
|
|||
.upload-area.dragover { |
|||
border-color: #764ba2; |
|||
background: rgba(118, 75, 162, .1); |
|||
transform: scale(1.02) |
|||
} |
|||
|
|||
.upload-icon { |
|||
font-size: 48px; |
|||
margin-bottom: 15px; |
|||
animation: float 3s ease-in-out infinite |
|||
} |
|||
|
|||
@keyframes float { |
|||
0%, 100% { |
|||
transform: translateY(0) |
|||
} |
|||
50% { |
|||
transform: translateY(-10px) |
|||
} |
|||
} |
|||
|
|||
.file-input { |
|||
display: none |
|||
} |
|||
|
|||
.file-info, .progress-container, .success-message, .error-message { |
|||
margin-top: 20px; |
|||
display: none |
|||
} |
|||
|
|||
.file-info.active, .progress-container.active { |
|||
display: block; |
|||
animation: fadeIn .3s |
|||
} |
|||
|
|||
@keyframes fadeIn { |
|||
from { |
|||
opacity: 0 |
|||
} |
|||
to { |
|||
opacity: 1 |
|||
} |
|||
} |
|||
|
|||
.progress-bar-bg { |
|||
background: #e0e0e0; |
|||
border-radius: 10px; |
|||
height: 20px; |
|||
overflow: hidden; |
|||
position: relative |
|||
} |
|||
|
|||
.progress-bar { |
|||
background: linear-gradient(90deg, #667eea 0%, #764ba2 100%); |
|||
height: 100%; |
|||
width: 0; |
|||
transition: width .3s; |
|||
position: relative; |
|||
overflow: hidden |
|||
} |
|||
|
|||
.progress-bar::after { |
|||
content: ''; |
|||
position: absolute; |
|||
top: 0; |
|||
left: 0; |
|||
bottom: 0; |
|||
right: 0; |
|||
background: linear-gradient(90deg, transparent, rgba(255, 255, 255, .3), transparent); |
|||
animation: shimmer 2s infinite |
|||
} |
|||
|
|||
@keyframes shimmer { |
|||
0% { |
|||
transform: translateX(-100%) |
|||
} |
|||
100% { |
|||
transform: translateX(100%) |
|||
} |
|||
} |
|||
|
|||
.progress-text { |
|||
text-align: center; |
|||
margin-top: 10px; |
|||
color: #666; |
|||
font-size: 14px |
|||
} |
|||
|
|||
.upload-btn { |
|||
margin-top: 20px; |
|||
padding: 12px 40px; |
|||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); |
|||
color: #fff; |
|||
border: none; |
|||
border-radius: 25px; |
|||
font-size: 16px; |
|||
cursor: pointer; |
|||
transition: all .3s; |
|||
display: none |
|||
} |
|||
|
|||
.upload-btn:hover { |
|||
transform: translateY(-2px); |
|||
box-shadow: 0 5px 15px rgba(0, 0, 0, .2) |
|||
} |
|||
|
|||
.upload-btn.active { |
|||
display: inline-block; |
|||
animation: bounceIn .5s |
|||
} |
|||
|
|||
@keyframes bounceIn { |
|||
0% { |
|||
transform: scale(.8); |
|||
opacity: 0 |
|||
} |
|||
50% { |
|||
transform: scale(1.1) |
|||
} |
|||
100% { |
|||
transform: scale(1); |
|||
opacity: 1 |
|||
} |
|||
} |
|||
|
|||
.hidden { |
|||
display: none |
|||
} |
|||
|
|||
.loading-spinner { |
|||
display: none; |
|||
width: 40px; |
|||
height: 40px; |
|||
margin: 20px auto; |
|||
border: 4px solid #f3f3f3; |
|||
border-top: 4px solid #667eea; |
|||
border-radius: 50%; |
|||
animation: spin 1s linear infinite |
|||
} |
|||
|
|||
@keyframes spin { |
|||
0% { |
|||
transform: rotate(0deg) |
|||
} |
|||
100% { |
|||
transform: rotate(360deg) |
|||
} |
|||
} |
|||
</style> |
|||
</head> |
|||
<body> |
|||
<div class="upload-container"> |
|||
<div class="upload-header"> |
|||
<h1>大文件上传(断点续传)</h1> |
|||
<p>支持秒传、断点续传,最大支持2GB</p> |
|||
</div> |
|||
|
|||
<!-- 拖拽区域 --> |
|||
<div class="upload-area" id="uploadArea"> |
|||
<div class="upload-icon">📁</div> |
|||
<p>点击或拖拽文件到此处上传</p> |
|||
<input type="file" id="fileInput" class="file-input"/> |
|||
</div> |
|||
|
|||
<!-- 文件信息 --> |
|||
<div class="file-info" id="fileInfo"> |
|||
<div class="file-name" id="fileName"></div> |
|||
<div class="file-size" id="fileSize"></div> |
|||
</div> |
|||
|
|||
<!-- 进度 --> |
|||
<div class="progress-container" id="progressContainer"> |
|||
<div class="progress-bar-bg"> |
|||
<div class="progress-bar" id="progressBar"></div> |
|||
</div> |
|||
<div class="progress-text" id="progressText">0%</div> |
|||
<div class="chunk-info" id="chunkInfo"></div> |
|||
</div> |
|||
|
|||
<!-- 操作按钮 --> |
|||
<button class="upload-btn" id="uploadBtn">开始上传</button> |
|||
<div class="loading-spinner" id="loadingSpinner"></div> |
|||
|
|||
<!-- 结果提示 --> |
|||
<div class="success-message" id="successMessage">✅ 上传成功</div> |
|||
<div class="error-message" id="errorMessage">❌ 上传失败</div> |
|||
</div> |
|||
|
|||
<!-- SparkMD5 计算 MD5 --> |
|||
<script src="https://cdn.jsdelivr.net/npm/spark-md5@3.0.2/spark-md5.min.js"></script> |
|||
<script> |
|||
// ---- DOM element handles ----
const uploadArea = document.getElementById('uploadArea');
const fileInput = document.getElementById('fileInput');
const fileName = document.getElementById('fileName');
const fileSize = document.getElementById('fileSize');
const progressBar = document.getElementById('progressBar');
const progressText = document.getElementById('progressText');
const chunkInfo = document.getElementById('chunkInfo');
const uploadBtn = document.getElementById('uploadBtn');
const loadingSpinner = document.getElementById('loadingSpinner');
const successBox = document.getElementById('successMessage');
const errorBox = document.getElementById('errorMessage');
const fileInfoBox = document.getElementById('fileInfo');
const progressBox = document.getElementById('progressContainer');

// ---- Upload state ----
let file = null;                     // currently selected File
let identifier = '';                 // MD5 digest of the file, keys the server-side task
const chunkSize = 5 * 1024 * 1024;   // 5MB
let totalChunks = 0;                 // number of chunks for the current file
let uploadedParts = new Set();       // part numbers already on the server (resume support)

/* ---------- Drag & drop / file picker wiring ---------- */
uploadArea.addEventListener('click', () => fileInput.click());
fileInput.addEventListener('change', e => handleFile(e.target.files[0]));
uploadArea.addEventListener('dragover', e => {
    e.preventDefault();
    uploadArea.classList.add('dragover');
});
uploadArea.addEventListener('dragleave', () => uploadArea.classList.remove('dragover'));
uploadArea.addEventListener('drop', e => {
    e.preventDefault();
    uploadArea.classList.remove('dragover');
    handleFile(e.dataTransfer.files[0]);
});
|||
|
|||
/* Show the chosen file's name and formatted size, then reveal the upload button. */
function handleFile(f) {
    if (!f) {
        return;
    }
    file = f;
    fileName.textContent = f.name;
    fileSize.textContent = formatSize(f.size);
    for (const el of [fileInfoBox, uploadBtn]) {
        el.classList.add('active');
    }
}
|||
|
|||
/* Format a byte count as a human-readable string, e.g. 5242880 -> "5.00 MB".
   Caps at GB. */
function formatSize(b) {
    const units = ['B', 'KB', 'MB', 'GB'];
    let idx = 0;
    for (; b >= 1024 && idx < 3; idx++) {
        b /= 1024;
    }
    return `${b.toFixed(2)} ${units[idx]}`;
}
|||
|
|||
/* ---------- Helpers ---------- */
/* Compute the MD5 of a File chunk-by-chunk: each chunkSize slice is read
   with FileReader and fed into SparkMD5, so the whole file is never held in
   memory at once. Resolves with the hex digest string; rejects on read error. */
async function md5File(f) {
    return new Promise((resolve, reject) => {
        // Vendor-prefixed slice fallbacks for older browsers
        const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
        const chunks = Math.ceil(f.size / chunkSize);
        let currentChunk = 0;
        const spark = new SparkMD5.ArrayBuffer();
        const reader = new FileReader();

        reader.onload = e => {
            spark.append(e.target.result);
            currentChunk++;
            // Keep reading until every chunk has been hashed, then finalize
            if (currentChunk < chunks) loadNext();
            else resolve(spark.end());
        };
        reader.onerror = () => reject('FileReader error');

        // Read the next [start, end) slice of the file
        function loadNext() {
            const start = currentChunk * chunkSize;
            const end = Math.min(start + chunkSize, f.size);
            reader.readAsArrayBuffer(blobSlice.call(f, start, end));
        }

        loadNext();
    });
}
|||
|
|||
/* ---------- 上传主流程 ---------- */ |
|||
uploadBtn.addEventListener('click', startUpload); |
|||
|
|||
/* Main upload flow: hash the file -> query existing progress -> init the
   server-side task -> upload missing parts via pre-signed URLs -> merge.
   Parts already on the server are skipped (breakpoint resume). */
async function startUpload() {
    if (!file) return;
    uploadBtn.style.display = 'none';
    loadingSpinner.style.display = 'block';
    progressBox.classList.add('active');

    try {
        identifier = await md5File(file);
        totalChunks = Math.ceil(file.size / chunkSize);

        // Ask the server which parts already exist (resume support)
        const progressResp = await fetch(`http://localhost:8081/api/file/v1/chunk_upload_progress/${identifier}`);
        const progressData = await progressResp.json();
        if (progressData.code === 0 && progressData.data != null && progressData.data.exitPartList) {
            progressData.data.exitPartList.forEach(p => uploadedParts.add(p.partNumber));
        }

        // Initialize the task (an existing task resumes instead of restarting)
        const initResp = await fetch('http://localhost:8081/api/file/v1/init_file_chunk_task', {
            method: 'POST',
            headers: {'Content-Type': 'application/json'},
            body: JSON.stringify({
                filename: file.name,
                identifier,
                totalSize: file.size,
                chunkSize
            })
        });
        const initData = await initResp.json();
        if (initData.code !== 0) throw new Error(initData.msg || '初始化失败');

        // Upload every part that is not on the server yet
        let uploaded = uploadedParts.size;
        for (let i = 0; i < totalChunks; i++) {
            const partNumber = i + 1;
            if (uploadedParts.has(partNumber)) continue;

            const start = i * chunkSize;
            const end = Math.min(start + chunkSize, file.size);
            const chunk = file.slice(start, end);

            // Fetch a pre-signed PUT URL for this part
            const urlResp = await fetch(`http://localhost:8081/api/file/v1/get_file_chunk_upload_url/${identifier}/${partNumber}`);
            const urlData = await urlResp.json();
            if (urlData.code !== 0) throw new Error(urlData.msg || '获取URL失败');

            // PUT the chunk straight to object storage.
            // BUGFIX: check the HTTP status — the original ignored failures, so
            // a failed part was silently counted as uploaded and the later
            // merge request would fail with missing parts.
            const putResp = await fetch(urlData.data, {method: 'PUT', body: chunk});
            if (!putResp.ok) throw new Error(`分片 ${partNumber} 上传失败 (HTTP ${putResp.status})`);

            uploaded++;
            const percent = (uploaded / totalChunks) * 100;
            progressBar.style.width = percent + '%';
            progressText.textContent = Math.round(percent) + '%';
            chunkInfo.textContent = `正在上传第 ${partNumber} / ${totalChunks} 个分片`;
        }

        // All parts present: ask the server to merge them
        const mergeResp = await fetch('http://localhost:8081/api/file/v1/merge_file_chunk', {
            method: 'POST',
            headers: {'Content-Type': 'application/json'},
            body: JSON.stringify({identifier: identifier, parentId: 0})
        });
        const mergeData = await mergeResp.json();
        if (mergeData.code !== 0) throw new Error(mergeData.msg || '合并失败');

        loadingSpinner.style.display = 'none';
        successBox.style.display = 'block';
        reset();
    } catch (e) {
        loadingSpinner.style.display = 'none';
        errorBox.textContent = `❌ ${e.message || '上传出错'}`;
        errorBox.style.display = 'block';
    }
}
|||
|
|||
/* After 3 seconds, clear all UI state so another file can be selected. */
function reset() {
    setTimeout(() => {
        for (const box of [fileInfoBox, progressBox, uploadBtn]) {
            box.classList.remove('active');
        }
        uploadBtn.style.display = '';
        successBox.style.display = 'none';
        errorBox.style.display = 'none';
        progressBar.style.width = '0%';
        progressText.textContent = '0%';
        file = null;
        fileInput.value = '';
    }, 3000);
}
|||
</script> |
|||
</body> |
|||
</html> |
File diff suppressed because one or more lines are too long
@ -0,0 +1,150 @@ |
|||
package org.ycloud.aipan; |
|||
|
|||
import com.amazonaws.HttpMethod; |
|||
import com.amazonaws.services.s3.AmazonS3Client; |
|||
import com.amazonaws.services.s3.model.*; |
|||
import lombok.extern.slf4j.Slf4j; |
|||
import org.junit.jupiter.api.Test; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.boot.test.context.SpringBootTest; |
|||
|
|||
import java.net.URL; |
|||
import java.util.*; |
|||
import java.util.stream.Collectors; |
|||
|
|||
@SpringBootTest
@Slf4j
public class BigFileUploadTest {


    @Autowired
    private AmazonS3Client amazonS3Client;

    //===================== Big-file multipart upload APIs ===========================

    /**
     * Step 1: initialize the multipart upload task and obtain the uploadId.
     * When resuming an interrupted upload an uploadId already exists — do not
     * generate a new one in that case.
     */
    @Test
    public void testInitiateMultipartUploadTask() {
        String bucketName = "ai-pan";
        String objectKey = "/meta/test5.txt";

        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentType("text/plain");
        // objectMetadata.setContentType("application/vnd.openxmlformats-officedocument.wordprocessingml.document");

        // Build the multipart-upload initialization request
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, objectKey, objectMetadata);

        // Start the multipart upload task
        InitiateMultipartUploadResult uploadResult = amazonS3Client.initiateMultipartUpload(initRequest);
        String uploadId = uploadResult.getUploadId();
        log.info("uploadId:{}", uploadId);

    }

    /**
     * Step 2: generate a pre-signed PUT URL for each part so the client can
     * upload every chunk directly to the object store.
     */
    @Test
    public void testGenePreSignedUrls() {
        // Object key name
        String objectKey = "/meta/test5.txt";
        // Bucket name
        String bucket = "ai-pan";
        // Number of parts; 4 here
        int chunkCount = 4;

        // Upload id of an in-progress multipart upload (from step 1)
        String uploadId = "NzViMDY5NGUtM2IwOC00ZDhkLTk1ODMtM2EyYThhZGFmNmI3LjVhMDlmZDJkLWE2ODgtNDgwNS1hODQwLWZhMTEwNWI0NTUwZA";

        // Holds the pre-signed URL of each part
        List<String> partList = new ArrayList<>();
        // Generate a pre-signed URL for every part
        for (int i = 1; i <= chunkCount; i++) {
            // Pre-signed URL expiry: 1 hour from now
            Date expiration = new Date(System.currentTimeMillis() + 3600 * 1000);
            // Pre-signed URL request with HTTP method PUT
            GeneratePresignedUrlRequest genePreSignedUrlReq = new GeneratePresignedUrlRequest(bucket, objectKey, HttpMethod.PUT).withExpiration(expiration);
            // uploadId + partNumber bind the URL to one specific part
            genePreSignedUrlReq.addRequestParameter("uploadId", uploadId);
            genePreSignedUrlReq.addRequestParameter("partNumber", String.valueOf(i));
            // Generate the pre-signed URL
            URL url = amazonS3Client.generatePresignedUrl(genePreSignedUrlReq);
            // Collect the generated URL
            partList.add(url.toString());
            // Log the URL list accumulated so far
            log.info("partList:{}", partList);
        }
    }


    /**
     * Step 3: merge the uploaded parts into the final object.
     */
    @Test
    public void testMergeChunks() {
        // Object key name
        String objectKey = "/meta/test5.txt";
        // Bucket name
        String bucket = "ai-pan";
        // Expected number of parts; 4 here
        int chunkCount = 4;
        // Upload id identifying this multipart upload
        String uploadId = "NzViMDY5NGUtM2IwOC00ZDhkLTk1ODMtM2EyYThhZGFmNmI3LjVhMDlmZDJkLWE2ODgtNDgwNS1hODQwLWZhMTEwNWI0NTUwZA";

        // List the parts uploaded so far
        ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, objectKey, uploadId);
        PartListing partListing = amazonS3Client.listParts(listPartsRequest);
        List<PartSummary> parts = partListing.getParts();
        // Verify the part count matches the expectation
        if (chunkCount != parts.size()) {
            // Uploaded part count differs from the record: cannot merge
            throw new RuntimeException("分片缺失,请重新上传");
        }

        // Build the complete-multipart-upload request
        CompleteMultipartUploadRequest completeMultipartUploadRequest = new CompleteMultipartUploadRequest()
                .withUploadId(uploadId)
                .withKey(objectKey)
                .withBucketName(bucket)
                .withPartETags(parts.stream()
                        // Wrap each part's number and ETag in a PartETag
                        .map(partSummary -> new PartETag(partSummary.getPartNumber(), partSummary.getETag()))
                        .collect(Collectors.toList()));

        // Complete the multipart upload and inspect the result
        CompleteMultipartUploadResult result = amazonS3Client.completeMultipartUpload(completeMultipartUploadRequest);
        log.info("result:{}", result.getBucketName());
    }

    @Test
    // Lists the already-uploaded parts of a multipart upload that has not completed
    public void testListParts() {
        // Object key name
        String objectKey = "/meta/test5.txt";
        // Bucket name
        String bucket = "ai-pan";
        // Upload id identifying this multipart upload
        String uploadId = "ZjFkZjRhN2UtNzMzOS04NTUxLTgwOTEtNWViNzUwNmRmYTEzLmE4NTUyMmQyLTM1NjUtNGMwMS05ZTY2LWQ5MWQ4NDUyBmIyA";

        // Does the final (merged) object already exist in the bucket?
        boolean doesObjectExist = amazonS3Client.doesObjectExist(bucket, objectKey);
        if (!doesObjectExist) {
            // Not merged yet: fetch the parts that have been uploaded
            ListPartsRequest listPartsRequest = new ListPartsRequest(bucket, objectKey, uploadId);
            PartListing partListing = amazonS3Client.listParts(listPartsRequest);
            List<PartSummary> parts = partListing.getParts();
            // Result map holding the upload state and the part list
            Map<String, Object> result = new HashMap<>();
            result.put("finished", false);
            result.put("exitPartList", parts);
            // The front end uses this to decide whether to call the merge API
            log.info("result:{}", result);

            // Print every part's details
            for (PartSummary partSummary : parts) {
                System.out.println("getPartNumber:" + partSummary.getPartNumber() + ",getETag=" + partSummary.getETag() + ",getSize= " + partSummary.getSize() + ",getLastModified=" + partSummary.getLastModified());
            }
            // Print the bucket name
            System.out.println(partListing.getBucketName());
        }

    }
}
@ -0,0 +1,158 @@ |
|||
package org.ycloud.aipan; |
|||
|
|||
import lombok.SneakyThrows; |
|||
import lombok.extern.slf4j.Slf4j; |
|||
import org.apache.http.client.methods.CloseableHttpResponse; |
|||
import org.apache.http.client.methods.HttpPut; |
|||
import org.apache.http.entity.FileEntity; |
|||
import org.apache.http.impl.client.CloseableHttpClient; |
|||
import org.apache.http.impl.client.HttpClients; |
|||
import org.assertj.core.util.Lists; |
|||
import org.junit.jupiter.api.Test; |
|||
import org.springframework.beans.factory.annotation.Autowired; |
|||
import org.springframework.boot.test.context.SpringBootTest; |
|||
import org.ycloud.aipan.controller.req.FileChunkInitTaskReq; |
|||
import org.ycloud.aipan.controller.req.FileChunkMergeReq; |
|||
import org.ycloud.aipan.dto.FileChunkDTO; |
|||
import org.ycloud.aipan.service.FileChunkService; |
|||
|
|||
import java.io.File; |
|||
import java.io.FileInputStream; |
|||
import java.io.FileOutputStream; |
|||
import java.io.IOException; |
|||
import java.net.http.HttpClient; |
|||
import java.net.http.HttpResponse; |
|||
import java.util.List; |
|||
|
|||
@SpringBootTest |
|||
@Slf4j |
|||
class FileChunkUploadTests { |
|||
|
|||
@Autowired |
|||
private FileChunkService fileChunkService; |
|||
|
|||
private Long accountId = 3L; |
|||
|
|||
private String identifier = "abcsdfsd"; |
|||
|
|||
/** |
|||
* 存储分片后端的文件路径和名称 |
|||
*/ |
|||
private final List<String> chunkFilePaths = Lists.newArrayList(); |
|||
|
|||
/** |
|||
* 存储分片上传地址 |
|||
*/ |
|||
private final List<String> chunkUploadUrls = Lists.newArrayList(); |
|||
|
|||
/** |
|||
* 上传ID |
|||
*/ |
|||
private String uploadId; |
|||
|
|||
/** |
|||
* 分片大小,5MB |
|||
*/ |
|||
private final long chunkSize = 5 * 1024 * 1024; |
|||
|
|||
/** |
|||
* 用一个10MB以上的文件,按5MB分片大小进行分片 |
|||
*/ |
|||
|
|||
@Test |
|||
public void testCreateChunkFiles() { |
|||
|
|||
// 将文件分片存储
|
|||
String filePath = "/Users/xdclass/Desktop/chunk/es_note.pdf"; |
|||
File file = new File(filePath); |
|||
long fileSize = file.length(); |
|||
//int chunkCount = (int) Math.ceil((double) fileSize / CHUNK_SIZE);
|
|||
int chunkCount = (int) Math.ceil(fileSize * 1.0 / chunkSize); |
|||
log.info("创建分片数量是: {} chunks", chunkCount); |
|||
try (FileInputStream fis = new FileInputStream(file)) { |
|||
byte[] buffer = new byte[(int) chunkSize]; |
|||
for (int i = 0; i < chunkCount; i++) { |
|||
String chunkFileName = filePath + ".part" + (i + 1); |
|||
try (FileOutputStream fos = new FileOutputStream(chunkFileName)) { |
|||
int bytesRead = fis.read(buffer); |
|||
fos.write(buffer, 0, bytesRead); |
|||
log.info("创建的分片文件名: {} ({} bytes)", chunkFileName, bytesRead); |
|||
chunkFilePaths.add(chunkFileName); |
|||
} |
|||
} |
|||
} catch (IOException e) { |
|||
e.printStackTrace(); |
|||
} |
|||
} |
|||
|
|||
|
|||
/** |
|||
* 第1步,创建分片上传任务 |
|||
*/ |
|||
private void testInitFileChunkTask() { |
|||
FileChunkInitTaskReq req = new FileChunkInitTaskReq(); |
|||
req.setAccountId(accountId).setFilename("es_note.pdf") |
|||
.setTotalSize((long) (20552959))//20552959
|
|||
.setChunkSize((long) (5 * 1024 * 1024))//5242880
|
|||
.setIdentifier(identifier); |
|||
FileChunkDTO fileChunkDTO = fileChunkService.initFileChunkTask(req); |
|||
log.info("分片上传初始化结果: {}", fileChunkDTO); |
|||
|
|||
uploadId = fileChunkDTO.getUploadId(); |
|||
|
|||
testGetFileChunkUploadUrl(); |
|||
} |
|||
/** |
|||
* 第2步,获取分片上传地址,返回临时MinIO地址,前端直接上传到Minio里面 |
|||
*/ |
|||
private void testGetFileChunkUploadUrl() { |
|||
|
|||
for (int i = 1; i <= chunkFilePaths.size(); i++) { |
|||
String uploadUrl = fileChunkService.genPreSignUploadUrl(accountId, identifier, i); |
|||
log.info("分片上传地址: {}", uploadUrl); |
|||
//存储4个分片地址
|
|||
chunkUploadUrls.add(uploadUrl); |
|||
} |
|||
|
|||
uploadChunk(); |
|||
} |
|||
|
|||
/** |
|||
* 模拟前端直接上传分片 |
|||
*/ |
|||
@SneakyThrows |
|||
private void uploadChunk() { |
|||
CloseableHttpClient httpClient = HttpClients.createDefault(); |
|||
for (int i = 0; i < chunkUploadUrls.size(); i++) { |
|||
// PUT直接上传到minio
|
|||
String chunkUploadId = chunkUploadUrls.get(i); |
|||
HttpPut httpPut = new HttpPut(chunkUploadId); |
|||
httpPut.setHeader("Content-Type","application/octet-stream"); |
|||
File chunkFile = new File(chunkFilePaths.get(i)); |
|||
FileEntity chunkFileEntity = new FileEntity(chunkFile); |
|||
httpPut.setEntity(chunkFileEntity); |
|||
CloseableHttpResponse chunkUploadResp = httpClient.execute(httpPut); |
|||
httpPut.releaseConnection(); |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* 测试合并分片 |
|||
*/ |
|||
@Test |
|||
public void testMergeFileChunk() { |
|||
FileChunkMergeReq req = new FileChunkMergeReq(); |
|||
req.setAccountId(accountId).setIdentifier(identifier).setParentId(233L); |
|||
fileChunkService.mergeFileChunk(req); |
|||
} |
|||
|
|||
|
|||
/** |
|||
* 查询分片上传进度 |
|||
*/ |
|||
@Test |
|||
public void testChunkUploadProgress() { |
|||
FileChunkDTO fileChunkDTO = fileChunkService.listFileChunk(accountId, identifier); |
|||
log.info("分片上传进度: {}", fileChunkDTO); |
|||
} |
|||
} |
Loading…
Reference in new issue