diff --git a/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java b/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java
new file mode 100644
index 00000000..d6c1b004
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java
@@ -0,0 +1,28 @@
+package com.kamco.cd.kamcoback.common.enums;
+
+import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
+import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+@CodeExpose
+@Getter
+@AllArgsConstructor
+public enum FileUploadStatus implements EnumType {
+    INIT("초기화"),
+    UPLOADING("업로드중"),
+    DONE("업로드완료"),
+    MERGED("병합완료");
+
+    private final String desc;
+
+    @Override
+    public String getId() {
+        return name();
+    }
+
+    @Override
+    public String getText() {
+        return desc;
+    }
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java
index ce622964..dd3c6ce8 100644
--- a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java
+++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java
@@ -29,6 +29,7 @@ import lombok.Getter;
 import org.apache.commons.io.FilenameUtils;
 import org.geotools.coverage.grid.GridCoverage2D;
 import org.geotools.gce.geotiff.GeoTiffReader;
+import org.springframework.util.FileSystemUtils;
 import org.springframework.web.multipart.MultipartFile;
 
 public class FIleChecker {
@@ -489,6 +490,22 @@
     return true;
   }
 
+  /** Saves one multipart chunk as "<targetPath>/<chunkIndex>"; returns false on I/O failure. */
+  public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) {
+    File dest = new File(targetPath, String.valueOf(chunkIndex));
+    try {
+      mfile.transferTo(dest);
+    } catch (IOException ignored) {
+      // Failure is reported via the boolean contract; caller decides how to retry/abort.
+      return false;
+    }
+    return true;
+  }
+
+  /** Recursively deletes the file/directory at {@code path}; true if anything was deleted. */
+  public static boolean deleteFolder(String path) {
+    return FileSystemUtils.deleteRecursively(new File(path));
+  }
+
 public
static boolean validationMultipart(MultipartFile mfile) {
     // 파일 유효성 검증
     if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {
diff --git a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java
index 8fa98624..f254918a 100644
--- a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java
+++ b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java
@@ -44,6 +44,8 @@ public class SecurityConfig {
         .authorizeHttpRequests(
             auth ->
                 auth
+                    // NOTE(review): chunk_upload_test.html is opened up in the permitAll list
+                    // below — re-enable authentication for it before production.
                     // 맵시트 영역 전체 허용 (우선순위 최상단)
                     .requestMatchers("/api/mapsheet/**")
                     .permitAll()
@@ -77,7 +79,12 @@
                         "/api/auth/logout",
                         "/swagger-ui/**",
                         "/api/members/*/password",
-                        "/v3/api-docs/**")
+                        "/v3/api-docs/**",
+                        // TODO(security): the four upload endpoints below are anonymous; restrict them once the upload auth flow exists.
+                        "/chunk_upload_test.html",
+                        "/api/model/file-chunk-upload",
+                        "/api/upload/file-chunk-upload",
+                        "/api/upload/chunk-upload-complete")
                     .permitAll()
                 // 로그인한 사용자만 가능 IAM
                 .requestMatchers("/api/user/**", "/api/my/menus", "/api/code/**")
diff --git a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java
index 1dcb01b6..c6b89ae5 100644
--- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java
+++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java
@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils;
 import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
 import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
 import com.kamco.cd.kamcoback.model.service.ModelMngService;
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
 import io.swagger.v3.oas.annotations.media.Schema;
@@ -11,16 +12,20 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
 import jakarta.transaction.Transactional;
+import jakarta.validation.Valid;
 import java.io.IOException;
 import java.time.LocalDate;
+import java.util.UUID;
 import lombok.RequiredArgsConstructor;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.data.domain.Page;
 import org.springframework.http.MediaType;
 import org.springframework.web.bind.annotation.DeleteMapping;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RequestPart;
@@ -36,6 +41,14 @@ public class ModelMngApiController {
 
   private final ModelMngService modelMngService;
 
+  // Only the model upload paths are referenced by this controller; the unused
+  // sync-*/dataset-* @Value fields were removed in review (datasets use UploadApiController).
+  @Value("${file.model-dir}")
+  private String modelDir;
+
+  @Value("${file.model-tmp-dir}")
+  private String modelTmpDir;
+
   @Autowired private ZipUtils zipUtils;
 
   @Operation(summary = "모델관리 목록")
@@ -70,19 +83,59 @@
         @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
         @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
       })
-  @DeleteMapping("/{modelVer}")
+  @DeleteMapping("/{uuid}")
   public ApiResponseDto removeModel(
       @io.swagger.v3.oas.annotations.parameters.RequestBody(
               description = "모델 삭제 요청 정보",
               required = true)
@PathVariable
-          String modelVer) {
-    return ApiResponseDto.okObject(modelMngService.removeModel(modelVer));
+          String uuid) {
+    return ApiResponseDto.okObject(modelMngService.removeModel(UUID.fromString(uuid)));
   }
 
-  @Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드")
-  @PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
-  public void upload(@RequestPart MultipartFile zipFilie) throws IOException {
-    zipUtils.processZip(zipFilie.getInputStream());
+
+  @Operation(summary = "모델등록")
+  @PostMapping
+  public ApiResponseDto addModel(
+      @RequestBody @Valid ModelMngDto.AddReq addReq) {
+
+    return ApiResponseDto.ok(modelMngService.insertModel(addReq));
+  }
+
+
+  @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
+  @ApiResponses(
+      value = {
+        @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
+        @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
+        @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
+        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
+      })
+  @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
+  public ApiResponseDto fileChunkUpload(
+      @RequestParam("uuid") UUID uuid,
+      @RequestParam("fileName") String fileName,
+      @RequestParam("fileSize") long fileSize,
+      // @RequestParam("fileHash") String fileHash,
+      @RequestParam("chunkIndex") Integer chunkIndex,
+      @RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
+      @RequestPart("chunkFile") MultipartFile chunkFile) {
+
+    String uploadDivi = "model";
+
+    UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
+    upAddReqDto.setDatasetId(0L);
+    upAddReqDto.setUuid(uuid);
+    upAddReqDto.setFileName(fileName);
+    upAddReqDto.setFileSize(fileSize);
+    upAddReqDto.setChunkIndex(chunkIndex);
+    upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
+    upAddReqDto.setUploadDivi(uploadDivi);
+    upAddReqDto.setFinalPath(modelDir);
+    upAddReqDto.setTempPath(modelTmpDir);
+
+    // NOTE(review): System.out debug print removed; inject an SLF4J Logger if tracing is needed.
+
+    return ApiResponseDto.ok(modelMngService.uploadChunkModelFile(upAddReqDto, chunkFile));
   }
 }
diff --git a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java
index ea9da363..27058f85 100644
--- a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java
+++ b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java
@@ -1,11 +1,13 @@
 package com.kamco.cd.kamcoback.model.dto;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
 import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
 import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
 import io.swagger.v3.oas.annotations.media.Schema;
 import java.math.BigDecimal;
 import java.time.ZonedDateTime;
+import java.util.UUID;
 import lombok.AllArgsConstructor;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
@@ -96,7 +98,6 @@
   @AllArgsConstructor
   public static class ModelList {
 
-    private Integer rowNum;
     private String modelVer;
     private String fileName;
     private String modelType;
@@ -108,6 +109,7 @@
     private BigDecimal iou;
     private String memo;
     private Boolean deleted;
+    private UUID uuid;
   }
 
   @Schema(name = "ModelAddReq", description = "모델 등록 req")
@@ -118,10 +120,14 @@
   public static class AddReq {
 
     private String modelType;
-    private String dockerFileNm;
     private String modelVer;
-    private String hyperVer;
-    private String epochVer;
+    private String filePath;
+    private String fileName;
+    private String memo;
+
+    @JsonIgnore
+    private UUID uuid;
+
   }
 
   @Schema(name = "searchReq", description = "검색 요청")
diff --git a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java
b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java
index 02c7e518..d50239ed 100644
--- a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java
@@ -3,10 +3,14 @@ package com.kamco.cd.kamcoback.model.service;
 import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
 import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
 import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
+import com.kamco.cd.kamcoback.upload.service.UploadService;
 import java.time.LocalDate;
+import java.util.UUID;
 import lombok.RequiredArgsConstructor;
 import org.springframework.data.domain.Page;
 import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
 
 @Service
 @RequiredArgsConstructor
@@ -14,6 +18,8 @@ public class ModelMngService {
 
   private final ModelMngCoreService modelMngCoreService;
 
+  private final UploadService uploadService;
+
   public Page findModelMgmtList(
       ModelMngDto.searchReq searchReq,
       LocalDate startDate,
@@ -24,7 +30,27 @@
         searchReq, startDate, endDate, modelType, searchVal);
   }
 
-  public ApiResponseDto.ResponseObj removeModel(String modelVer) {
-    return modelMngCoreService.removeModel(modelVer);
+  public ApiResponseDto.ResponseObj removeModel(UUID uuid) {
+
+    modelMngCoreService.removeModel(uuid);
+
+    return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "삭제되었습니다.");
   }
+
+
+  public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) {
+    UUID uuid = UUID.randomUUID();
+    addReq.setUuid(uuid);
+    modelMngCoreService.insertModel(addReq);
+    return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "등록되었습니다.");
+  }
+
+  public UploadDto.UploadRes uploadChunkModelFile(UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile)
+  {
+    // Delegate chunk persistence to the shared upload service (same path as dataset uploads).
+
+    return uploadService.uploadChunk(upAddReqDto, chunkFile);
+  }
+
+
 }
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java
index 7acb4c21..87fa15d1 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java
@@ -6,6 +6,7 @@ import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
 import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
 import jakarta.persistence.EntityNotFoundException;
 import java.time.LocalDate;
+import java.util.UUID;
 import lombok.RequiredArgsConstructor;
 import org.springframework.data.domain.Page;
 import org.springframework.stereotype.Service;
@@ -26,14 +27,24 @@
         searchReq, startDate, endDate, modelType, searchVal);
   }
 
-  public ApiResponseDto.ResponseObj removeModel(String modelVer) {
+  public void removeModel(UUID uuid) {
+    /*
     ModelMngEntity entity =
         modelMngRepository
-            .findByModelUid(modelVer)
-            .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다. 
ver: " + modelVer));
-
+            .findByModelUuid(uuid)
+            .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다."));
+    */
     // id 코드 deleted = true 업데이트
-    entity.deleted();
-    return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "");
+    //entity.deleted();
+
+    modelMngRepository.deleteByModelUuid(uuid);
+  }
+
+  public void insertModel(ModelMngDto.AddReq addReq){
+
+    //ModelMngEntity addEntity = new ModelMngEntity();
+    //addEntity.setModelType(addReq.getModelType());
+
+    modelMngRepository.insertModel(addReq);
+  }
 }
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java
new file mode 100644
index 00000000..f963464c
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java
@@ -0,0 +1,35 @@
+package com.kamco.cd.kamcoback.postgres.core;
+
+import com.kamco.cd.kamcoback.postgres.repository.upload.UploadSessionRepository;
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
+import java.util.UUID;
+import lombok.RequiredArgsConstructor;
+import org.springframework.stereotype.Service;
+
+/** Core persistence operations for chunk-upload sessions (tb_upload_session). */
+@Service
+@RequiredArgsConstructor
+public class UploadSessionCoreService {
+
+  private final UploadSessionRepository uploadSessionRepository;
+
+  /** Persists a new upload-session row. */
+  public void createUploadSession(UploadDto.UploadAddReq addReq) {
+    uploadSessionRepository.insertUploadSession(addReq);
+  }
+
+  /** Returns the session for a dataset id and upload division, or null when absent. */
+  public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
+    return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi);
+  }
+
+  /** Returns the session with the given public uuid, or null when absent. */
+  public UploadDto.uploadDto findByUuid(UUID uuid) {
+    return uploadSessionRepository.findByUuid(uuid);
+  }
+
+  /** Updates chunk progress and status on an existing session row. */
+  public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
+    uploadSessionRepository.updateUploadSessionStatus(addReq);
+  }
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java
index 9c06015f..d97ecf40 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java
@@ -8,6 +8,7 @@ import jakarta.persistence.GenerationType;
 import jakarta.persistence.Id;
 import jakarta.persistence.Table;
 import java.time.ZonedDateTime;
+import java.util.UUID;
 import lombok.Getter;
 import lombok.Setter;
@@ -52,6 +53,9 @@
   @Column(name = "memo")
   private String memo;
 
+  @Column(name = "uuid")
+  private UUID uuid;
+
   public void deleted() {
     this.deleted = true;
   }
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java
new file mode 100644
index 00000000..58646a83
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java
@@ -0,0 +1,90 @@
+package com.kamco.cd.kamcoback.postgres.entity;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Table;
+import
jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Size; +import java.time.OffsetDateTime; +import java.util.UUID; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.annotations.ColumnDefault; + +@Getter +@Setter +@Entity +@Table(name = "tb_upload_session") +public class UploadSessionEntity { + + @Id + @Size(max = 100) + @Column(name = "upload_id", nullable = false, length = 100) + private String uploadId; + + @Column(name = "completed_dttm") + private OffsetDateTime completedDttm; + + @NotNull + @ColumnDefault("now()") + @Column(name = "created_dttm", nullable = false) + private OffsetDateTime createdDttm; + + @Column(name = "dataset_id") + private Long datasetId; + + @Column(name = "error_message", length = Integer.MAX_VALUE) + private String errorMessage; + + @Size(max = 255) + @Column(name = "file_name") + private String fileName; + + @ColumnDefault("0") + @Column(name = "file_size") + private Long fileSize; + + @Size(max = 500) + @Column(name = "final_path", length = 500) + private String finalPath; + + @Size(max = 20) + @Column(name = "status", length = 20) + private String status; + + @Size(max = 500) + @Column(name = "temp_path", length = 500) + private String tempPath; + + @Column(name = "chunk_total_index") + private Integer chunkTotalIndex; + + @NotNull + @ColumnDefault("now()") + @Column(name = "updated_dttm", nullable = false) + private OffsetDateTime updatedDttm; + + @Column(name = "chunk_index") + private Integer chunkIndex; + + @Size(max = 50) + @Column(name = "upload_divi", length = 50) + private String uploadDivi; + + @Size(max = 300) + @Column(name = "file_hash", length = 300) + private String fileHash; + + @Column(name = "total_chunks") + private Integer totalChunks; + + @Column(name = "uploaded_chunks") + private Integer uploadedChunks; + + @NotNull + @ColumnDefault("uuid_generate_v4()") + @Column(name = "uuid", nullable = false) + private UUID uuid; + +} diff --git 
a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java index 5d34fdee..e82b4593 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java @@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import java.time.LocalDate; import java.util.Optional; +import java.util.UUID; import org.springframework.data.domain.Page; public interface ModelMngRepositoryCustom { @@ -16,4 +17,11 @@ public interface ModelMngRepositoryCustom { String searchVal); Optional findByModelUid(String modelVer); + + Optional findByModelUuid(UUID uuid); + + + void insertModel(ModelMngDto.AddReq addReq); + + void deleteByModelUuid(UUID uuid); } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java index 18290f45..92cd36ef 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java @@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.NumberPath; import com.querydsl.core.types.dsl.StringExpression; import com.querydsl.jpa.impl.JPAQueryFactory; +import jakarta.validation.Valid; import java.math.BigDecimal; import java.time.LocalDate; import java.time.LocalDateTime; @@ -22,6 +23,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.UUID; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import 
org.springframework.data.domain.Pageable;
@@ -67,8 +69,6 @@
         .select(
             Projections.constructor(
                 ModelMngDto.ModelList.class,
-                Expressions.numberTemplate(
-                    Integer.class, "row_number() over(order by {0} desc)", sortColumn),
                 modelMngEntity.modelVer,
                 modelMngEntity.fileName,
                 modelMngEntity.modelType,
@@ -80,7 +80,8 @@
                 roundNumericToPercent(modelResultMetricEntity.loss),
                 roundNumericToPercent(modelResultMetricEntity.iou),
                 modelMngEntity.memo,
-                modelMngEntity.deleted))
+                modelMngEntity.deleted,
+                modelMngEntity.uuid))
         .from(modelMngEntity)
         .innerJoin(modelResultMetricEntity)
         .on(modelMngEntity.modelUid.eq(modelResultMetricEntity.modelUid))
@@ -116,6 +117,18 @@
         .fetchOne());
   }
 
+  @Override
+  public Optional findByModelUuid(UUID uuid) {
+
+    // NOTE(review): System.out debug print removed; use a logger if query tracing is needed.
+
+    return Optional.ofNullable(
+        queryFactory
+            .selectFrom(modelMngEntity)
+            .where(modelMngEntity.uuid.eq(uuid))
+            .fetchOne());
+  }
+
   private BooleanExpression eventEndedAtBetween(
       LocalDate startDate, LocalDate endDate, String sortColumn) {
     if (Objects.isNull(startDate) || Objects.isNull(endDate)) {
@@ -155,4 +168,37 @@
   private Expression roundNumericToPercent(NumberPath ratio) {
     return Expressions.numberTemplate(BigDecimal.class, "function('round', {0} * 100, 2)", ratio);
   }
+
+  @Override
+  public void insertModel(@Valid ModelMngDto.AddReq addReq) {
+    // QueryDSL bulk insert; the returned row count is intentionally ignored.
+        queryFactory
+            .insert(modelMngEntity)
+            .columns(
+                modelMngEntity.modelVer,
+                modelMngEntity.modelType,
+                modelMngEntity.filePath,
+                modelMngEntity.fileName,
+                modelMngEntity.memo,
+                modelMngEntity.uuid)
+            .values(
+                addReq.getModelVer(),
+                addReq.getModelType(),
+                addReq.getFilePath(),
+                addReq.getFileName(),
+                addReq.getMemo(),
+                addReq.getUuid())
.execute();
+  }
+
+  @Override
+  public void deleteByModelUuid(UUID uuid){
+    // Soft delete: flips the deleted flag by uuid; returned row count intentionally ignored.
+        queryFactory
+            .update(modelMngEntity)
+            .set(modelMngEntity.deleted, true)
+            .where(modelMngEntity.uuid.eq(uuid))
+            .execute();
+  }
+
 }
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java
new file mode 100644
index 00000000..6499f129
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java
@@ -0,0 +1,7 @@
+package com.kamco.cd.kamcoback.postgres.repository.upload;
+
+import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+
+public interface UploadSessionRepository
+    extends JpaRepository, UploadSessionRepositoryCustom {}
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java
new file mode 100644
index 00000000..a6502e85
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java
@@ -0,0 +1,19 @@
+package com.kamco.cd.kamcoback.postgres.repository.upload;
+
+
+
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
+import java.util.UUID;
+
+public interface UploadSessionRepositoryCustom {
+
+
+  void insertUploadSession(UploadDto.UploadAddReq addReq);
+
+  UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
+
+  UploadDto.uploadDto findByUuid(UUID uuid);
+  void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
+
+
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java
new file mode 100644
index
00000000..e6914ee2
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java
@@ -0,0 +1,142 @@
+package com.kamco.cd.kamcoback.postgres.repository.upload;
+
+
+import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
+
+import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
+import com.querydsl.core.types.Projections;
+import com.querydsl.core.types.dsl.Expressions;
+import com.querydsl.core.types.dsl.StringExpression;
+import com.querydsl.jpa.impl.JPAQueryFactory;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import java.util.UUID;
+import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
+
+public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
+    implements UploadSessionRepositoryCustom {
+
+  private final JPAQueryFactory queryFactory;
+  // NOTE(review): removed unused NULL_STRING constant (dead code).
+
+  // NOTE(review): removed unused @PersistenceContext EntityManager (dead code).
+
+  public UploadSessionRepositoryImpl(JPAQueryFactory queryFactory) {
+    super(UploadSessionEntity.class);
+    this.queryFactory = queryFactory;
+  }
+
+  @Override
+  public void insertUploadSession(UploadDto.UploadAddReq addReq) {
+    // QueryDSL insert; returned row count intentionally ignored.
+        queryFactory
+            .insert(uploadSessionEntity)
+            .columns(
+                uploadSessionEntity.uploadId,
+                uploadSessionEntity.datasetId,
+                uploadSessionEntity.fileName,
+                uploadSessionEntity.fileSize,
+                uploadSessionEntity.finalPath,
+                uploadSessionEntity.status,
+                uploadSessionEntity.tempPath,
+                uploadSessionEntity.chunkIndex,
+                uploadSessionEntity.chunkTotalIndex,
+                uploadSessionEntity.uploadDivi,
+                uploadSessionEntity.fileHash,
+                uploadSessionEntity.uuid
+            )
+            .values(
+                addReq.getUploadId(),
+                addReq.getDatasetId(),
+                addReq.getFileName(),
+                addReq.getFileSize(),
+                addReq.getFinalPath(),
+                addReq.getStatus(),
+ addReq.getTempPath(),
+                addReq.getChunkIndex(),
+                addReq.getChunkTotalIndex(),
+                addReq.getUploadDivi(),
+                addReq.getFileHash(),
+                addReq.getUuid()
+            )
+            .execute();
+
+
+  }
+
+
+  @Override
+  public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
+
+    UploadDto.uploadDto foundContent =
+        queryFactory
+            .select(
+                Projections.constructor(
+                    UploadDto.uploadDto.class,
+                    uploadSessionEntity.uploadId,
+                    uploadSessionEntity.datasetId,
+                    uploadSessionEntity.fileName,
+                    uploadSessionEntity.fileSize,
+                    uploadSessionEntity.finalPath,
+                    uploadSessionEntity.uploadDivi,
+                    uploadSessionEntity.status,
+                    uploadSessionEntity.tempPath,
+                    uploadSessionEntity.chunkIndex,
+                    uploadSessionEntity.chunkTotalIndex,
+                    uploadSessionEntity.fileHash,
+                    uploadSessionEntity.uuid
+                ))
+            .from(uploadSessionEntity)
+            .where(uploadSessionEntity.datasetId.eq(datasetId)
+                .and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
+            .limit(1)
+            .fetchOne();
+
+    return foundContent;
+
+  }
+
+
+  @Override
+  public UploadDto.uploadDto findByUuid(UUID uuid) {
+
+    UploadDto.uploadDto foundContent =
+        queryFactory
+            .select(
+                Projections.constructor(
+                    UploadDto.uploadDto.class,
+                    uploadSessionEntity.uploadId,
+                    uploadSessionEntity.datasetId,
+                    uploadSessionEntity.fileName,
+                    uploadSessionEntity.fileSize,
+                    uploadSessionEntity.finalPath,
+                    uploadSessionEntity.uploadDivi,
+                    uploadSessionEntity.status,
+                    uploadSessionEntity.tempPath,
+                    uploadSessionEntity.chunkIndex,
+                    uploadSessionEntity.chunkTotalIndex,
+                    uploadSessionEntity.fileHash,
+                    uploadSessionEntity.uuid
+                ))
+            .from(uploadSessionEntity)
+            .where(uploadSessionEntity.uuid.eq(uuid))
+            .limit(1)
+            .fetchOne();
+
+    return foundContent;
+
+  }
+
+
+  @Override public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
+    // QueryDSL update of chunk progress; returned row count intentionally ignored.
+        queryFactory
+            .update(uploadSessionEntity)
+            .set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex())
+            .set(uploadSessionEntity.status, addReq.getStatus())
+
.where(uploadSessionEntity.uploadId.eq(addReq.getUploadId())) + .execute(); + } + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java b/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java new file mode 100644 index 00000000..ceae6eb9 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java @@ -0,0 +1,144 @@ +package com.kamco.cd.kamcoback.upload; + +import com.kamco.cd.kamcoback.config.api.ApiResponseDto; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.kamco.cd.kamcoback.upload.service.UploadService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.UUID; +import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RequestPart; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +@Tag(name = "파일 업로드", description = "대용량 파일 업로드 API") +@RestController +@RequestMapping("/api/upload") +@RequiredArgsConstructor +public class UploadApiController { + + private final UploadService uploadService; + + @Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + 
private String datasetDir;

    @Value("${file.dataset-tmp-dir}")
    private String datasetTmpDir;

    @Value("${file.model-dir}")
    private String modelDir;

    @Value("${file.model-tmp-dir}")
    private String modelTmpDir;

    /*
    @Operation(summary = "데이터셋 대용량 업로드 세션 시작", description = "데이터셋 대용량 파일 업로드 세션을 시작합니다.")
    @ApiResponses(
        value = {
            @ApiResponse(
                responseCode = "201",
                description = "세션 생성 성공",
                content =
                    @Content(
                        mediaType = "application/json",
                        schema = @Schema(implementation = UploadDto.InitRes.class))),
            @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
            @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
        })
    @PostMapping("/chunk-upload-init")
    public ApiResponseDto initUpload(
        @RequestBody @Valid UploadDto.InitReq initReq) {
        return ApiResponseDto.createOK(uploadService.initUpload(initReq));
    }
    */

    /**
     * Receives one chunk of a large dataset file and hands it to the upload service.
     * The client supplies a UUID that identifies the whole upload session; chunk
     * indices are zero-based, so the last chunk has index == chunkTotalIndex.
     */
    @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
    @ApiResponses(
        value = {
            @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
            @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
            @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
            @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
        })
    @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
    public ApiResponseDto fileChunkUpload(
        @RequestParam("uuid") UUID uuid,
        @RequestParam("fileName") String fileName,
        @RequestParam("fileSize") long fileSize,
        // @RequestParam("fileHash") String fileHash,
        @RequestParam("chunkIndex") Integer chunkIndex,
        @RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
        @RequestPart("chunkFile") MultipartFile chunkFile) {

        // This endpoint is fixed to the "dataset" upload flavor; datasetId is not
        // known at upload time, so 0L is used as a placeholder in the session row.
        String uploadDivi = "dataset";

        UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
        upAddReqDto.setDatasetId(0L);
        upAddReqDto.setUuid(uuid);
        upAddReqDto.setFileName(fileName);
        upAddReqDto.setFileSize(fileSize);
        upAddReqDto.setChunkIndex(chunkIndex);
        upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
        upAddReqDto.setUploadDivi(uploadDivi);
        upAddReqDto.setFinalPath(datasetDir);
        upAddReqDto.setTempPath(datasetTmpDir);

        // NOTE: removed a System.out.println debug print that ran on every chunk request.

        return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
    }

    /**
     * Explicitly finishes an upload session: merges all stored chunks for the
     * given session UUID into the final file.
     */
    @Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.")
    @ApiResponses(
        value = {
            @ApiResponse(responseCode = "200", description = "업로드 완료 성공", content = @Content),
            @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
            @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
            @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
        })
    @PutMapping("/chunk-upload-complete/{uuid}")
    public ApiResponseDto completeUpload(
        @PathVariable UUID uuid) {
        return ApiResponseDto.ok(uploadService.completeUpload(uuid));
    }

    /*
    @Operation(summary = "업로드 상태 조회", description = "업로드 진행 상태를 조회합니다.")
    @ApiResponses(
        value = {
            @ApiResponse(
                responseCode = "200",
                description = "조회 성공",
                content =
                    @Content(
                        mediaType = "application/json",
                        schema = @Schema(implementation = UploadDto.Status.class))),
            @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
            @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
        })
    @PostMapping("/status")
    public ApiResponseDto getUploadStatus(
        @RequestBody @Valid UploadDto.StatusReq statusReq) {
        return ApiResponseDto.ok(uploadService.getUploadStatus(statusReq));
    }
    */
}
package com.kamco.cd.kamcoback.upload.dto;

import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/** Request/response DTOs for the chunked (large file) upload API. */
public class UploadDto {

    /** Payload that opens a new chunk-upload session. */
    @Schema(name = "InitReq", description = "업로드(Chunk) 세션 초기화")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class InitReq {

        @Schema(description = "파일명", example = "data.zip")
        private String fileName;

        @Schema(description = "파일 크기 (bytes)", example = "10737418240")
        private Long fileSize;

        @Schema(description = "총 청크 수", example = "100")
        private Integer chunkTotalIndex;

        /*
        @Schema(
            description = "파일해쉬",
            example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
        private String fileHash;
        */

        @Schema(description = "업로드구분", example = "model")
        private String uploadDivi;
    }

    /** Per-chunk upload request, also used to create/update the session row. */
    @Schema(name = "UploadAddReq", description = "업로드 요청")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class UploadAddReq {
        @Schema(description = "업로드 ID", example = "각데이터의 식별키")
        private String uploadId;

        @Schema(description = "데이터식별키", example = "129227333")
        private Long datasetId;

        @Schema(description = "파일명", example = "data.zip")
        private String fileName;

        @Schema(description = "파일 크기 (bytes)", example = "10737418240")
        private Long fileSize;

        // Destination directory for the merged file (not a file name).
        @Schema(description = "파일명", example = "data.zip")
        private String finalPath;

        @Schema(description = "업로드구분", example = "dataset")
        private String uploadDivi;

        @Schema(description = "상태", example = "UPLOADING")
        private String status;

        @Schema(description = "임시저장경로")
        private String tempPath;

        @Schema(description = "업로드 청크 Index", example = "50")
        private Integer chunkIndex;

        @Schema(description = "총 청크 수", example = "100")
        private Integer chunkTotalIndex;

        @Schema(
            description = "파일해쉬",
            example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
        private String fileHash;

        @Schema(description = "uuid", example = "303d4e24-1726-4272-bbc7-01ab85692b80")
        private UUID uuid;
    }

    /** Marks a session as finished (merge request). */
    @Schema(name = "UploadCompleteReq", description = "업로드 완료 요청")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class CompleteReq {

        @NotBlank(message = "업로드 ID는 필수입니다")
        @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
        private String uploadId;
    }

    /** Status lookup request. */
    @Schema(name = "UploadStatusReq", description = "업로드 상태 조회 요청")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class StatusReq {

        @NotBlank(message = "업로드 ID는 필수입니다")
        @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
        private String uploadId;
    }

    /** Snapshot of an upload session's progress. */
    @Schema(name = "UploadStatus", description = "업로드 상태 정보")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Status {

        @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
        private String uploadId;

        @Schema(description = "파일명", example = "data.zip")
        private String fileName;

        @Schema(description = "파일 크기 (bytes)", example = "10737418240")
        private Long fileSize;

        @Schema(description = "상태", example = "UPLOADING")
        private String status;

        @Schema(description = "총 청크 수", example = "100")
        private Integer totalChunks;

        @Schema(description = "업로드된 청크 수", example = "50")
        private Integer uploadedChunks;

        @Schema(description = "진행률 (%)", example = "50.0")
        private Double progress;

        @Schema(description = "에러 메시지", example = "")
        private String errorMessage;
    }

    /**
     * Projection of the upload-session table row (used by the QueryDSL repository).
     * NOTE(review): class name breaks UpperCamelCase, but it is part of the public
     * API used by repository projections — kept for backward compatibility.
     */
    // BUGFIX: was @Schema(name = "UploadAddReq"), which collided with the real
    // UploadAddReq schema above — springdoc keys schemas by name, so one of the
    // two definitions silently replaced the other in the generated OpenAPI doc.
    @Schema(name = "UploadSessionDto", description = "업로드 세션 정보")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class uploadDto {
        @Schema(description = "업로드 ID", example = "각데이터의 식별키")
        private String uploadId;

        @Schema(description = "데이터식별키", example = "129227333")
        private Long datasetId;

        @Schema(description = "파일명", example = "data.zip")
        private String fileName;

        @Schema(description = "파일 크기 (bytes)", example = "10737418240")
        private Long fileSize;

        // Destination directory of the merged file.
        @Schema(description = "파일명", example = "data.zip")
        private String finalPath;

        @Schema(description = "업로드구분", example = "dataset")
        private String uploadDivi;

        @Schema(description = "상태", example = "UPLOADING")
        private String status;

        @Schema(description = "임시저장경로")
        private String tempPath;

        @Schema(description = "업로드 청크 Index", example = "50")
        private Integer chunkIndex;

        @Schema(description = "총 청크 Index", example = "100")
        private Integer chunkTotalIndex;

        @Schema(
            description = "파일해쉬",
            example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
        private String fileHash;

        @Schema(description = "uuid")
        private UUID uuid;
    }

    /** Result returned to the client after each chunk / merge call. */
    @Schema(name = "UploadRes", description = "업로드 수행 후 리턴")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class UploadRes {
        private String res;
        private String resMsg;
        private UUID uuid;
        private String filePath;
        private String fileName;
        private int chunkIndex;
        private int chunkTotalIndex;

        /**
         * Progress in percent. Both indices are zero-based, hence the +1 terms.
         */
        public double getUploadRate() {
            // BUGFIX: the old guard (chunkTotalIndex == 0 -> 0.0) made a completed
            // single-chunk upload report 0% even though (0+1)/(0+1)*100 == 100.
            // The denominator is chunkTotalIndex + 1, so only negative totals are invalid.
            if (this.chunkTotalIndex < 0) {
                return 0.0;
            }
            return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0;
        }
    }

    /** Generic DML result (flag + message). */
    @Schema(name = "DmlReturn", description = "수행 후 리턴")
    @Getter
    @Setter
    @NoArgsConstructor
    @AllArgsConstructor
    public static class DmlReturn {

        private String flag;
        private String message;
    }
}
package com.kamco.cd.kamcoback.upload.service;

import com.kamco.cd.kamcoback.common.enums.FileUploadStatus;
import com.kamco.cd.kamcoback.common.utils.FIleChecker;
import com.kamco.cd.kamcoback.postgres.core.UploadSessionCoreService;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto.DmlReturn;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

/**
 * Chunked large-file upload service.
 *
 * <p>Each upload session is keyed by a client-supplied UUID. Chunks are stored as
 * numbered files ("0", "1", ...) under {@code tempPath + uuid}; when the last chunk
 * arrives (zero-based {@code chunkIndex == chunkTotalIndex}) the chunks are merged
 * into {@code finalPath + uuid + "/" + fileName}. Session state transitions are
 * INIT → UPLOADING → DONE → MERGED (see {@link FileUploadStatus}).
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class UploadService {

    private final UploadSessionCoreService uploadSessionCoreService;

    @Value("${file.sync-root-dir}")
    private String syncRootDir;

    @Value("${file.sync-tmp-dir}")
    private String syncTmpDir;

    @Value("${file.sync-file-extention}")
    private String syncFileExtention;

    @Value("${file.dataset-dir}")
    private String datasetDir;

    @Value("${file.dataset-tmp-dir}")
    private String datasetTmpDir;

    @Value("${file.model-dir}")
    private String modelDir;

    @Value("${file.model-tmp-dir}")
    private String modelTmpDir;

    /** Session-init placeholder (the real init endpoint is not wired up yet). */
    @Transactional
    public DmlReturn initUpload(UploadDto.InitReq initReq) {

        return new DmlReturn("success", "UPLOAD CHUNK INIT");
    }

    /**
     * Stores one chunk and, when it is the last one, merges all chunks into the
     * final file. Errors are reported via {@code res}/{@code resMsg} on the result,
     * never thrown.
     */
    @Transactional
    public UploadDto.UploadRes uploadChunk(UploadDto.UploadAddReq upAddReqDto, MultipartFile file) {

        UploadDto.UploadRes upRes = new UploadDto.UploadRes();

        String uploadId = String.valueOf(System.currentTimeMillis());
        UUID uuid = upAddReqDto.getUuid();
        // Per-session working directories, isolated by UUID.
        // NOTE(review): plain concatenation assumes the configured paths end with a
        // separator — confirm against the property values.
        String tmpDataSetDir = upAddReqDto.getTempPath() + uuid;
        String finalDir = upAddReqDto.getFinalPath() + uuid;
        String fileName = upAddReqDto.getFileName();
        Integer chunkIndex = upAddReqDto.getChunkIndex();
        Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex();

        upRes.setUuid(uuid);
        upRes.setFilePath(finalDir);
        upRes.setFileName(fileName);

        upAddReqDto.setUploadId(uploadId);
        upAddReqDto.setStatus(FileUploadStatus.INIT.name());
        upAddReqDto.setTempPath(tmpDataSetDir);
        upAddReqDto.setFinalPath(finalDir);

        // Create the session on the first chunk, or validate/refresh it afterwards
        // (also detects duplicate and missing sessions).
        UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
        if (!upRes.getRes().equals("success")) return upRes;

        upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());

        if (dto != null) {
            // Resuming an existing session: reuse the paths recorded at creation.
            tmpDataSetDir = dto.getTempPath();
            finalDir = dto.getFinalPath();
        }

        // Make sure the temp (chunk) and final folders exist.
        if (!checkChunkFoler(upRes, tmpDataSetDir, finalDir)) return upRes;

        // Persist this chunk as "<tmpDir>/<chunkIndex>".
        if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
            upRes.setRes("fail");
            upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 애러");
            // BUGFIX: abort here — the original fell through and attempted the
            // merge even though this chunk was never written.
            return upRes;
        }

        // BUGFIX: compare Integer values, not references. '==' on boxed Integers is
        // only reliable for the cached range (-128..127), so uploads with more than
        // 128 chunks would never trigger the merge.
        if (chunkIndex.intValue() == chunkTotalIndex.intValue()) {

            // BUGFIX: dto is null when the very first chunk is also the last one
            // (single-chunk upload) — fall back to the freshly generated uploadId
            // instead of dereferencing null.
            upAddReqDto.setUploadId(dto != null ? dto.getUploadId() : uploadId);
            upAddReqDto.setStatus(FileUploadStatus.DONE.name());
            uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);

            try {
                this.mergeChunks(tmpDataSetDir, finalDir, fileName, chunkTotalIndex);

                // Use the enum instead of the hard-coded "MERGED" literal.
                upAddReqDto.setStatus(FileUploadStatus.MERGED.name());
                uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);

            } catch (IOException e) {
                log.error("chunk merge failed. uuid={}", uuid, e);
                upRes.setRes("fail");
                upRes.setResMsg("파일Chunk 병합(merge) 애러");
                return upRes;
            }
        }

        return upRes;
    }

    /**
     * Explicit merge request for an existing session identified by {@code uuid}.
     */
    @Transactional
    public UploadDto.UploadRes completeUpload(UUID uuid) {

        UploadDto.UploadRes upRes = new UploadDto.UploadRes();
        UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid);

        // BUGFIX: an unknown UUID used to NPE on dto.getFinalPath(); report a
        // "no session" result instead (same message as checkUploadSession).
        if (dto == null) {
            upRes.setRes("nosession");
            upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
            upRes.setUuid(uuid);
            return upRes;
        }

        upRes.setRes("success");
        upRes.setResMsg("병합(merge) 정상처리되었습니다.");
        upRes.setUuid(uuid);
        upRes.setFilePath(dto.getFinalPath());
        upRes.setFileName(dto.getFileName());
        upRes.setChunkIndex(dto.getChunkIndex());
        upRes.setChunkTotalIndex(dto.getChunkTotalIndex());

        try {
            this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
        } catch (IOException e) {
            log.error("merge failed. uuid={}", uuid, e);
            upRes.setRes("fail");
            upRes.setResMsg("병합(merge) 애러");
            return upRes;
        }

        return upRes;
    }

    /**
     * Ensures the temp (chunk) and final folders exist; records a failure on
     * {@code upRes} and returns false otherwise.
     */
    public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String finalDir) {

        if (!FIleChecker.mkDir(tmpDataSetDir)) {
            upRes.setRes("fail");
            // BUGFIX: the original called setRes twice, overwriting the "fail"
            // flag with the message text and never setting resMsg.
            upRes.setResMsg("CHUNK 폴더 생성 ERROR");
            return false;
        }

        if (!FIleChecker.mkDir(finalDir)) {
            upRes.setRes("fail");
            // BUGFIX: same setRes/setResMsg mix-up as above.
            upRes.setResMsg("업로드 완료 폴더 생성 ERROR");
            return false;
        }

        return true;
    }

    /**
     * Creates the session row on the first chunk, or validates/refreshes it for
     * later chunks. Sets res/resMsg ("success" / "duplicate" / "nosession") on
     * {@code upRes} and returns the existing session, or null when newly created.
     */
    public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {

        upRes.setRes("success");
        upRes.setResMsg("정상처리되었습니다.");

        UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid());

        if (upAddReqDto.getChunkIndex() == 0) {
            // First chunk: there must not already be a session for this UUID.
            if (dto != null) {
                upRes.setRes("duplicate");
                upRes.setResMsg("이미 진행중인 업로드세션입니다.");
                return dto;
            }

            upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
            upRes.setUuid(upAddReqDto.getUuid());
            uploadSessionCoreService.createUploadSession(upAddReqDto);
        } else {
            // Later chunk: the session must already exist.
            if (dto == null) {
                upRes.setRes("nosession");
                upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
                return dto;
            }

            upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
            upAddReqDto.setUploadId(dto.getUploadId());
            uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
        }

        if (dto != null) upRes.setUuid(dto.getUuid());

        upRes.setChunkIndex(upAddReqDto.getChunkIndex());
        upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());

        return dto;
    }

    /**
     * Concatenates chunk files 0..chunkTotalIndex from {@code tmpDir} into
     * {@code finalDir/fileName}, deleting each chunk right after it is copied and
     * removing the temp folder at the end.
     *
     * @throws IOException if any chunk is missing or a copy/delete fails
     */
    public void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex) throws IOException {

        Path outputPath = Paths.get(finalDir, fileName);
        // BUGFIX: added TRUNCATE_EXISTING — CREATE+WRITE alone does not truncate,
        // so re-merging over a previous, longer file left stale trailing bytes.
        try (FileChannel outChannel = FileChannel.open(
                outputPath,
                StandardOpenOption.CREATE,
                StandardOpenOption.WRITE,
                StandardOpenOption.TRUNCATE_EXISTING)) {
            for (int i = 0; i <= chunkTotalIndex; i++) {
                Path chunkPath = Paths.get(tmpDir, String.valueOf(i));

                try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
                    long transferred = 0;
                    long size = inChannel.size();
                    // transferTo may move fewer bytes than requested; loop until done.
                    while (transferred < size) {
                        transferred += inChannel.transferTo(transferred, size - transferred, outChannel);
                    }
                }
                // Delete each chunk immediately to free disk space during the merge.
                Files.delete(chunkPath);
            }
        }

        // Remove the (now empty) temp folder.
        FIleChecker.deleteFolder(tmpDir);
    }
}
a/src/main/resources/static/chunk_upload_test.html b/src/main/resources/static/chunk_upload_test.html new file mode 100644 index 00000000..e05993e7 --- /dev/null +++ b/src/main/resources/static/chunk_upload_test.html @@ -0,0 +1,137 @@ + + + + + Chunk Upload Test + + +

대용량 파일 청크 업로드 테스트

+ +* Chunk 테스트 사이즈 10M (10 * 1024 * 1024) - 성능에 따라 변경가능

+ +* 업로드 API선택

+ +

+* 파일첨부

+

+ +



+* 업로드시 업로드 이력을 추적하기 위해 UUID생성해서 전달(파일병합시 사용)(script 예제참고)

+UUID :

+ +* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)

+chunkIndex :

+chunkTotalIndex :

+ +* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)

+fileSize :

+ + + +

+* 진행률(%)

+
+

+* 결과 메시지

+
+ + + +