Upload-related fixes

Moon
2026-01-08 18:43:40 +09:00
parent 0a311b09a8
commit ecf9b8a24f
15 changed files with 185 additions and 239 deletions

View File

@@ -490,7 +490,8 @@ public class FIleChecker {
return true;
}
-public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) {
+public static boolean multipartChunkSaveTo(
+    MultipartFile mfile, String targetPath, int chunkIndex) {
File dest = new File(targetPath, String.valueOf(chunkIndex));
boolean fileUpload = true;
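Note: only the signature reformat and the first two lines of multipartChunkSaveTo are visible in this hunk. Below is a minimal sketch of what the chunk-save helper presumably does, assuming it streams the uploaded part to <targetPath>/<chunkIndex> with Spring's MultipartFile.transferTo; the committed body may differ.

    // Sketch only; assumes org.springframework.web.multipart.MultipartFile,
    // java.io.File, and java.io.IOException are imported.
    public static boolean multipartChunkSaveTo(
        MultipartFile mfile, String targetPath, int chunkIndex) {
      File dest = new File(targetPath, String.valueOf(chunkIndex));
      boolean fileUpload = true;
      try {
        // Write this chunk to <targetPath>/<chunkIndex>.
        mfile.transferTo(dest);
      } catch (IOException | IllegalStateException e) {
        fileUpload = false;
      }
      return fileUpload;
    }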

View File

@@ -13,7 +13,6 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
import jakarta.validation.Valid;
-import java.io.IOException;
import java.time.LocalDate;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
@@ -106,7 +105,6 @@ public class ModelMngApiController {
return ApiResponseDto.okObject(modelMngService.removeModel(UUID.fromString(uuid)));
}
@Operation(summary = "모델등록")
@PostMapping
public ApiResponseDto<ApiResponseDto.ResponseObj> ModelMgmt(
@@ -115,7 +113,6 @@ public class ModelMngApiController {
return ApiResponseDto.ok(modelMngService.insertModel(addReq));
}
@Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
@ApiResponses(
value = {

View File

@@ -125,9 +125,7 @@ public class ModelMngDto {
private String fileName;
private String memo;
-@JsonIgnore
-private UUID uuid;
+@JsonIgnore private UUID uuid;
}
@Schema(name = "searchReq", description = "검색 요청")

View File

@@ -59,7 +59,6 @@ public class ModelMngService {
return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "삭제되었습니다.");
}
public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) {
UUID uuid = UUID.randomUUID();
addReq.setUuid(uuid);
@@ -67,12 +66,10 @@ public class ModelMngService {
return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "등록되었습니다.");
}
-public UploadDto.UploadRes uploadChunkModelFile(UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile)
-{
+public UploadDto.UploadRes uploadChunkModelFile(
+    UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) {
UploadDto.UploadRes upRes = uploadService.uploadChunk(upAddReqDto, chunkFile);
return upRes;
}
}

View File

@@ -1,10 +1,7 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
import jakarta.persistence.EntityNotFoundException;
import java.time.LocalDate;
import java.util.UUID;
import lombok.RequiredArgsConstructor;

View File

@@ -12,8 +12,7 @@ public class UploadSessionCoreService {
private final UploadSessionRepository uploadSessionRepository;
-public void createUploadSession(UploadDto.UploadAddReq addReq)
-{
+public void createUploadSession(UploadDto.UploadAddReq addReq) {
/*
UUID newUuid = UUID.randomUUID();
@@ -39,7 +38,6 @@ public class UploadSessionCoreService {
*/
uploadSessionRepository.insertUploadSession(addReq);
}
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {

View File

@@ -86,5 +86,4 @@ public class UploadSessionEntity {
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid", nullable = false)
private UUID uuid;
}

View File

@@ -20,7 +20,6 @@ public interface ModelMngRepositoryCustom {
Optional<ModelMngEntity> findByModelUuid(UUID uuid);
void insertModel(ModelMngDto.AddReq addReq);
void deleteByModelUuid(UUID uuid);

View File

@@ -123,10 +123,7 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
System.out.println("uuid == " + uuid);
return Optional.ofNullable(
-queryFactory
-    .selectFrom(modelMngEntity)
-    .where(modelMngEntity.uuid.eq(uuid))
-    .fetchOne());
+queryFactory.selectFrom(modelMngEntity).where(modelMngEntity.uuid.eq(uuid)).fetchOne());
}
private BooleanExpression eventEndedAtBetween(
@@ -200,5 +197,4 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.where(modelMngEntity.uuid.eq(uuid))
.execute();
}
}

View File

@@ -1,19 +1,15 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
public interface UploadSessionRepositoryCustom {
void insertUploadSession(UploadDto.UploadAddReq addReq);
UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
UploadDto.uploadDto findByUuid(UUID uuid);
void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
}

View File

@@ -1,6 +1,5 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
@@ -44,8 +43,7 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.fileHash,
-uploadSessionEntity.uuid
-)
+uploadSessionEntity.uuid)
.values(
addReq.getUploadId(),
addReq.getDatasetId(),
@@ -58,14 +56,10 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
addReq.getChunkTotalIndex(),
addReq.getUploadDivi(),
addReq.getFileHash(),
-addReq.getUuid()
-)
+addReq.getUuid())
.execute();
}
@Override
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
@@ -85,19 +79,19 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
-uploadSessionEntity.uuid
-))
+uploadSessionEntity.uuid))
.from(uploadSessionEntity)
-.where(uploadSessionEntity.datasetId.eq(datasetId)
+.where(
+    uploadSessionEntity
+        .datasetId
+        .eq(datasetId)
.and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public UploadDto.uploadDto findByUuid(UUID uuid) {
@@ -117,18 +111,15 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
-uploadSessionEntity.uuid
-))
+uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(uploadSessionEntity.uuid.eq(uuid))
.limit(1)
.fetchOne();
return foundContent;
}
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
long fileCount =
queryFactory
@@ -138,5 +129,4 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
.where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
.execute();
}
}

View File

@@ -116,8 +116,7 @@ public class UploadApiController {
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/chunk-upload-complete/{uuid}")
-public ApiResponseDto<UploadDto.UploadRes> completeUpload(
-    @PathVariable UUID uuid) {
+public ApiResponseDto<UploadDto.UploadRes> completeUpload(@PathVariable UUID uuid) {
return ApiResponseDto.ok(uploadService.completeUpload(uuid));
}
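For reference, a hedged client-side call to the reworked completeUpload endpoint using the JDK's java.net.http client. The controller's class-level request mapping is not visible in this hunk, so the /api/upload prefix and host below are assumptions.

    // Assumption: base path /api/upload; adjust to the controller's actual @RequestMapping.
    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.util.UUID;

    public class CompleteUploadClient {
      public static void main(String[] args) throws Exception {
        UUID uuid = UUID.fromString(args[0]);
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request =
            HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/upload/chunk-upload-complete/" + uuid))
                .PUT(HttpRequest.BodyPublishers.noBody())
                .build();
        // Print the API response status and body returned by completeUpload.
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
      }
    }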

View File

@@ -153,7 +153,6 @@ public class UploadDto {
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@@ -175,7 +174,6 @@ public class UploadDto {
@Schema(description = "총 청크 Index", example = "100")
private Integer chunkTotalIndex;
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
@@ -183,7 +181,6 @@ public class UploadDto {
@Schema(description = "uuid")
private UUID uuid;
}
@Schema(name = "UploadRes", description = "업로드 수행 후 리턴")
@@ -206,11 +203,8 @@ public class UploadDto {
}
return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0;
}
}
@Schema(name = "DmlReturn", description = "수행 후 리턴")
@Getter
@Setter

View File

@@ -87,12 +87,10 @@ public class UploadService {
UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
if (!upRes.getRes().equals("success")) return upRes;
status = FileUploadStatus.UPLOADING.name();
upAddReqDto.setStatus(status);
-if( dto != null )
-{
+if (dto != null) {
tmpDataSetDir = dto.getTempPath();
fianlDir = dto.getFinalPath();
}
@@ -101,8 +99,7 @@ public class UploadService {
if (!checkChunkFoler(upRes, tmpDataSetDir, fianlDir)) return upRes;
// chunk저장하기
-if( ! FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex ) )
-{
+if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
upRes.setRes("fail");
upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 애러");
}
@@ -113,7 +110,6 @@ public class UploadService {
upAddReqDto.setStatus(FileUploadStatus.DONE.name());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
try {
this.mergeChunks(tmpDataSetDir, fianlDir, fileName, chunkTotalIndex);
@@ -127,9 +123,6 @@ public class UploadService {
upRes.setResMsg("파일Chunk 병합(merge) 애러");
return upRes;
}
}
return upRes;
@@ -149,7 +142,8 @@ public class UploadService {
upRes.setChunkTotalIndex(dto.getChunkTotalIndex());
try {
-this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
+this.mergeChunks(
+    dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
} catch (IOException e) {
upRes.setRes("fail");
@@ -159,21 +153,17 @@ public class UploadService {
}
return upRes;
}
-public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir)
-{
+public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) {
-if( ! FIleChecker.mkDir(tmpDataSetDir) )
-{
+if (!FIleChecker.mkDir(tmpDataSetDir)) {
upRes.setRes("fail");
upRes.setRes("CHUNK 폴더 생성 ERROR");
return false;
}
-if( ! FIleChecker.mkDir(fianlDir) )
-{
+if (!FIleChecker.mkDir(fianlDir)) {
upRes.setRes("fail");
upRes.setRes("업로드 완료 폴더 생성 ERROR");
return false;
@@ -182,8 +172,8 @@ public class UploadService {
return true;
}
-public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
+public UploadDto.uploadDto checkUploadSession(
+    UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
upRes.setRes("success");
upRes.setResMsg("정상처리되었습니다.");
@@ -191,8 +181,7 @@ public class UploadService {
UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid());
if (upAddReqDto.getChunkIndex() == 0) {
-if( dto != null )
-{
+if (dto != null) {
upRes.setRes("duplicate");
upRes.setResMsg("이미 진행중인 업로드세션입니다.");
return dto;
@@ -201,8 +190,7 @@ public class UploadService {
upAddReqDto.setStatus("UPLOADING");
upRes.setUuid(upAddReqDto.getUuid());
uploadSessionCoreService.createUploadSession(upAddReqDto);
-}
-else {
+} else {
if (dto == null) {
upRes.setRes("nosession");
upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
@@ -219,15 +207,15 @@ public class UploadService {
upRes.setChunkIndex(upAddReqDto.getChunkIndex());
upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());
return dto;
}
-public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex) throws IOException {
+public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex)
+    throws IOException {
Path outputPath = Paths.get(fianlDir, fileName);
-try (FileChannel outChannel = FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
+try (FileChannel outChannel =
+    FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
for (int i = 0; i <= chunkTotalIndex; i++) {
Path chunkPath = Paths.get(tmpDir, i + "");
@@ -245,8 +233,5 @@ public class UploadService {
// 병합후 임시 폴더 삭제
FIleChecker.deleteFolder(tmpDir);
}
}
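The body of mergeChunks is only partially visible across the hunks above. Below is a minimal sketch of the merge loop it appears to implement, assuming each chunk file 0..chunkTotalIndex is appended to <fianlDir>/<fileName> with FileChannel.transferFrom and the temporary chunk folder is deleted afterwards (as the last hunk shows); the committed implementation may differ.

    // Sketch only; assumes java.io.IOException, java.nio.channels.FileChannel,
    // java.nio.file.Path, java.nio.file.Paths, java.nio.file.StandardOpenOption are imported.
    public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex)
        throws IOException {
      Path outputPath = Paths.get(fianlDir, fileName);
      try (FileChannel outChannel =
          FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
        for (int i = 0; i <= chunkTotalIndex; i++) {
          Path chunkPath = Paths.get(tmpDir, i + "");
          // Append chunk i at the current end of the merged file.
          try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
            outChannel.transferFrom(inChannel, outChannel.size(), inChannel.size());
          }
        }
      }
      // After merging, delete the temporary chunk folder (shown in the hunk above).
      FIleChecker.deleteFolder(tmpDir);
    }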