diff --git a/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java b/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java
new file mode 100644
index 00000000..d6c1b004
--- /dev/null
+++ b/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java
@@ -0,0 +1,28 @@
+package com.kamco.cd.kamcoback.common.enums;
+
+import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
+import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+@CodeExpose
+@Getter
+@AllArgsConstructor
+public enum FileUploadStatus implements EnumType {
+  INIT("초기화"),
+  UPLOADING("업로드중"),
+  DONE("업로드완료"),
+  MERGED("병합완료");
+
+  private final String desc;
+
+  @Override
+  public String getId() {
+    return name();
+  }
+
+  @Override
+  public String getText() {
+    return desc;
+  }
+}
diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java
index ce622964..21f0f669 100644
--- a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java
+++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java
@@ -29,6 +29,7 @@ import lombok.Getter;
 import org.apache.commons.io.FilenameUtils;
 import org.geotools.coverage.grid.GridCoverage2D;
 import org.geotools.gce.geotiff.GeoTiffReader;
+import org.springframework.util.FileSystemUtils;
 import org.springframework.web.multipart.MultipartFile;
 
 public class FIleChecker {
@@ -489,6 +490,29 @@ public class FIleChecker {
     return true;
   }
 
+  /** Saves one uploaded chunk as file "<chunkIndex>" under targetPath; false on I/O failure. */
+  public static boolean multipartChunkSaveTo(
+      MultipartFile mfile, String targetPath, int chunkIndex) {
+    // Ensure the per-upload chunk directory exists; the first chunk of a new
+    // upload would otherwise fail transferTo() with a missing-directory error.
+    File targetDir = new File(targetPath);
+    if (!targetDir.exists() && !targetDir.mkdirs()) {
+      return false;
+    }
+    File dest = new File(targetDir, String.valueOf(chunkIndex));
+    try {
+      mfile.transferTo(dest);
+    } catch (IOException e) {
+      // Best-effort by design: callers treat false as "retry this chunk".
+      return false;
+    }
+    return true;
+  }
+
+  /** Recursively deletes the directory (or file) at path; false if nothing was deleted. */
+  public static boolean deleteFolder(String path) {
+    return FileSystemUtils.deleteRecursively(new File(path));
+  }
+
public static boolean validationMultipart(MultipartFile mfile) { // 파일 유효성 검증 if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) { diff --git a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java index 8fa98624..6977cf0f 100644 --- a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java +++ b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java @@ -44,6 +44,8 @@ public class SecurityConfig { .authorizeHttpRequests( auth -> auth + // .requestMatchers("/chunk_upload_test.html").authenticated() + // 맵시트 영역 전체 허용 (우선순위 최상단) .requestMatchers("/api/mapsheet/**") .permitAll() @@ -77,7 +79,11 @@ public class SecurityConfig { "/api/auth/logout", "/swagger-ui/**", "/api/members/*/password", - "/v3/api-docs/**") + "/v3/api-docs/**", + "/chunk_upload_test.html", + "/api/model/file-chunk-upload", + "/api/upload/file-chunk-upload", + "/api/upload/chunk-upload-complete") .permitAll() // 로그인한 사용자만 가능 IAM .requestMatchers("/api/user/**", "/api/my/menus", "/api/code/**") diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java index a5a82719..3325bd64 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java @@ -11,10 +11,13 @@ import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.validation.Valid; import java.time.LocalDate; import lombok.RequiredArgsConstructor; import org.springframework.data.domain.Page; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import 
org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @@ -59,6 +62,30 @@ public class InferenceResultApiController { return ApiResponseDto.ok(analResList); } + @Operation(summary = "변화탐지 실행 정보 입력", description = "어드민 홈 > 추론관리 > 추론목록 > 변화탐지 실행 정보 입력") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "변화탐지 실행 정보 생성 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Void.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PostMapping("/reg") + public ApiResponseDto saveInferenceInfo( + @io.swagger.v3.oas.annotations.parameters.RequestBody( + description = "변화탐지 실행 정보 저장 요청 정보", + required = true) + @RequestBody + @Valid + InferenceResultDto.RegReq req) { + return ApiResponseDto.ok(null); + } + // @ApiResponses( // value = { // @ApiResponse( diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java index 133b38cf..26efc23d 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java @@ -1,9 +1,14 @@ package com.kamco.cd.kamcoback.inference.dto; import com.kamco.cd.kamcoback.common.utils.enums.EnumType; +import com.kamco.cd.kamcoback.common.utils.interfaces.EnumValid; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; +import io.swagger.v3.oas.annotations.media.Schema; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; import java.time.LocalDate; import java.time.ZonedDateTime; +import java.util.List; 
import java.util.UUID; import lombok.AllArgsConstructor; import lombok.Getter; @@ -14,6 +19,7 @@ import org.springframework.data.domain.Pageable; public class InferenceResultDto { + /** 목록조회 dto */ @Getter @Setter @AllArgsConstructor @@ -32,6 +38,7 @@ public class InferenceResultDto { @JsonFormatDttm private ZonedDateTime applyDttm; } + /** 목록조회 검색 조건 dto */ @Getter @Setter @NoArgsConstructor @@ -53,6 +60,27 @@ public class InferenceResultDto { } } + /** 탐지 데이터 옵션 dto */ + @Getter + @AllArgsConstructor + public enum MapSheetScope implements EnumType { + EXCL("추론제외"), + PREV("이전 년도 도엽 사용"), + ; + private final String desc; + + @Override + public String getId() { + return name(); + } + + @Override + public String getText() { + return desc; + } + } + + /** 분석대상 도엽 enum */ @Getter @AllArgsConstructor public enum DetectOption implements EnumType { @@ -72,22 +100,51 @@ public class InferenceResultDto { } } + /** 변화탐지 실행 정보 저장 요청 정보 */ @Getter + @Setter + @NoArgsConstructor @AllArgsConstructor - public enum MapSheetScope implements EnumType { - EXCL("추론제외"), - PREV("이전 년도 도엽 사용"), - ; - private final String desc; + public static class RegReq { - @Override - public String getId() { - return name(); - } + @Schema(description = "제목", example = "2025-2026 track changes Pororo") + @NotBlank + private String title; - @Override - public String getText() { - return desc; - } + @Schema(description = "M1", example = "2") + @NotNull + private Long model1Uid; + + @Schema(description = "M2", example = "4") + @NotNull + private Long model2Uid; + + @Schema(description = "M3", example = "7") + @NotNull + private Long model3Uid; + + @Schema(description = "비교년도", example = "2003") + @NotNull + private Integer compareYyyy; + + @Schema(description = "탐지년도", example = "2004") + @NotNull + private Integer targetYyyy; + + @Schema(description = "탐지 데이터 옵션 - 추론제외(PREV), 이전 년도 도엽 사용(PREV)", example = "EXCL") + @NotBlank + @EnumValid( + enumClass = MapSheetScope.class, + message = "탐지 데이터 옵션은 
'추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.") + private String mapSheetScope; + + @Schema(description = "분석대상 도엽 - 전체(ALL), 부분(PART)", example = "PART") + @NotBlank + @EnumValid(enumClass = DetectOption.class, message = "분석대상 도엽 옵션은 '전체', '부분' 만 사용 가능합니다.") + private String detectOption; + + @Schema(description = "5k 도협 번호 목록", example = "[34607067,34607067]") + @NotNull + private List mapSheetNum; } } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java index 285e1527..aa2a0daa 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java @@ -31,6 +31,15 @@ public class InferenceResultService { return inferenceResultCoreService.getInferenceResultList(req); } + /** + * 변화탐지 실행 정보 생성 + * + * @param req + */ + public void saveInferenceInfo(InferenceResultDto.RegReq req) { + inferenceResultCoreService.saveInferenceInfo(req); + } + /** * 분석결과 요약정보 * diff --git a/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java b/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java index db764638..a945e5f0 100644 --- a/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java @@ -27,7 +27,8 @@ public class LabelAllocateDto { LABEL_COMPLETE("라벨완료"), INSPECT_REQ("검수요청"), INSPECT_ING("검수진행중"), - INSPECT_COMPLETE("검수완료"); + INSPECT_COMPLETE("검수완료"), + FINISH("종료"); private String desc; diff --git a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java index 1dcb01b6..28914ec1 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java @@ -4,6 +4,7 @@ 
import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils; import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.service.ModelMngService; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Schema; @@ -11,16 +12,19 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import jakarta.transaction.Transactional; -import java.io.IOException; +import jakarta.validation.Valid; import java.time.LocalDate; +import java.util.UUID; import lombok.RequiredArgsConstructor; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.data.domain.Page; import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RequestPart; @@ -36,6 +40,27 @@ public class ModelMngApiController { private final ModelMngService modelMngService; + @Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + private String datasetDir; + + @Value("${file.dataset-tmp-dir}") + private String datasetTmpDir; + + @Value("${file.model-dir}") + private 
String modelDir; + + @Value("${file.model-tmp-dir}") + private String modelTmpDir; + @Autowired private ZipUtils zipUtils; @Operation(summary = "모델관리 목록") @@ -56,12 +81,12 @@ public class ModelMngApiController { return ApiResponseDto.ok(result); } - @Operation(summary = "삭제", description = "모델을 삭제 합니다.") + @Operation(summary = "모델삭제", description = "모델을 삭제 합니다.") @ApiResponses( value = { @ApiResponse( - responseCode = "204", - description = "모델 삭제 성공", + responseCode = "201", + description = "등록 성공", content = @Content( mediaType = "application/json", @@ -70,19 +95,71 @@ public class ModelMngApiController { @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) }) - @DeleteMapping("/{modelVer}") + @DeleteMapping("/{uuid}") public ApiResponseDto removeModel( @io.swagger.v3.oas.annotations.parameters.RequestBody( description = "모델 삭제 요청 정보", required = true) @PathVariable - String modelVer) { - return ApiResponseDto.okObject(modelMngService.removeModel(modelVer)); + String uuid) { + + return ApiResponseDto.ok(modelMngService.removeModel(UUID.fromString(uuid))); } - @Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드") - @PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) - public void upload(@RequestPart MultipartFile zipFilie) throws IOException { - zipUtils.processZip(zipFilie.getInputStream()); + @Operation(summary = "모델등록", description = "모델을 등록 합니다.") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "등록 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Long.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + 
+  @PostMapping
+  public ApiResponseDto ModelMgmt(
+      @RequestBody @Valid ModelMngDto.AddReq addReq) {
+
+    return ApiResponseDto.ok(modelMngService.insertModel(addReq));
+  }
+
+  @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
+  @ApiResponses(
+      value = {
+        @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
+        @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
+        @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
+        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
+      })
+  @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
+  public ApiResponseDto fileChunkUpload(
+      @RequestParam("uuid") UUID uuid,
+      @RequestParam("fileName") String fileName,
+      @RequestParam("fileSize") long fileSize,
+      // @RequestParam("fileHash") String fileHash,
+      @RequestParam("chunkIndex") Integer chunkIndex,
+      @RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
+      @RequestPart("chunkFile") MultipartFile chunkFile) {
+
+    // Model uploads reuse the generic upload pipeline; discriminator fixed to "model".
+    String uploadDivi = "model";
+
+    UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
+    upAddReqDto.setDatasetId(0L);
+    upAddReqDto.setUuid(uuid);
+    upAddReqDto.setFileName(fileName);
+    upAddReqDto.setFileSize(fileSize);
+    upAddReqDto.setChunkIndex(chunkIndex);
+    upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
+    upAddReqDto.setUploadDivi(uploadDivi);
+    upAddReqDto.setFinalPath(modelDir);
+    upAddReqDto.setTempPath(modelTmpDir);
+
+    // NOTE(review): debug stdout logging of uuid removed; use SLF4J at debug level if needed.
+
+    return ApiResponseDto.ok(modelMngService.uploadChunkModelFile(upAddReqDto, chunkFile));
+  }
 }
diff --git a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java
index ea9da363..96e1ba56 100644
--- a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java
+++ 
b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java @@ -1,11 +1,13 @@ package com.kamco.cd.kamcoback.model.dto; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose; import com.kamco.cd.kamcoback.common.utils.enums.EnumType; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; import io.swagger.v3.oas.annotations.media.Schema; import java.math.BigDecimal; import java.time.ZonedDateTime; +import java.util.UUID; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; @@ -96,7 +98,6 @@ public class ModelMngDto { @AllArgsConstructor public static class ModelList { - private Integer rowNum; private String modelVer; private String fileName; private String modelType; @@ -108,6 +109,7 @@ public class ModelMngDto { private BigDecimal iou; private String memo; private Boolean deleted; + private UUID uuid; } @Schema(name = "ModelAddReq", description = "모델 등록 req") @@ -118,10 +120,12 @@ public class ModelMngDto { public static class AddReq { private String modelType; - private String dockerFileNm; private String modelVer; - private String hyperVer; - private String epochVer; + private String filePath; + private String fileName; + private String memo; + + @JsonIgnore private UUID uuid; } @Schema(name = "searchReq", description = "검색 요청") diff --git a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java index 02c7e518..87534f62 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java @@ -3,10 +3,15 @@ package com.kamco.cd.kamcoback.model.service; import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService; +import 
com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.kamco.cd.kamcoback.upload.service.UploadService; import java.time.LocalDate; +import java.util.UUID; import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Value; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; +import org.springframework.web.multipart.MultipartFile; @Service @RequiredArgsConstructor @@ -14,6 +19,29 @@ public class ModelMngService { private final ModelMngCoreService modelMngCoreService; + private final UploadService uploadService; + + @Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + private String datasetDir; + + @Value("${file.dataset-tmp-dir}") + private String datasetTmpDir; + + @Value("${file.model-dir}") + private String modelDir; + + @Value("${file.model-tmp-dir}") + private String modelTmpDir; + public Page findModelMgmtList( ModelMngDto.searchReq searchReq, LocalDate startDate, @@ -24,7 +52,24 @@ public class ModelMngService { searchReq, startDate, endDate, modelType, searchVal); } - public ApiResponseDto.ResponseObj removeModel(String modelVer) { - return modelMngCoreService.removeModel(modelVer); + public ApiResponseDto.ResponseObj removeModel(UUID uuid) { + + modelMngCoreService.removeModel(uuid); + + return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "삭제되었습니다."); + } + + public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) { + UUID uuid = UUID.randomUUID(); + addReq.setUuid(uuid); + modelMngCoreService.insertModel(addReq); + return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "등록되었습니다."); + } + + public UploadDto.UploadRes uploadChunkModelFile( + UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) { + UploadDto.UploadRes upRes = 
uploadService.uploadChunk(upAddReqDto, chunkFile); + + return upRes; } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java index 1b4443d4..34fe6e51 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java @@ -1,5 +1,6 @@ package com.kamco.cd.kamcoback.postgres.core; +import com.fasterxml.jackson.databind.ObjectMapper; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet; @@ -16,6 +17,7 @@ import jakarta.persistence.EntityNotFoundException; import jakarta.validation.constraints.NotNull; import java.util.List; import lombok.RequiredArgsConstructor; +import org.locationtech.jts.io.geojson.GeoJsonWriter; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -28,6 +30,9 @@ public class InferenceResultCoreService { private final MapSheetLearnRepository mapSheetLearnRepository; private final MapInkx5kRepository mapInkx5kRepository; + private final ObjectMapper objectMapper = new ObjectMapper(); + private final GeoJsonWriter geoJsonWriter = new GeoJsonWriter(); + /** * 추론관리 목록 * @@ -39,6 +44,20 @@ public class InferenceResultCoreService { return list.map(MapSheetLearnEntity::toDto); } + /** + * 변화탐지 실행 정보 생성 + * + * @param req + */ + public void saveInferenceInfo(InferenceResultDto.RegReq req) { + MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity(); + mapSheetLearnEntity.setTitle(req.getTitle()); + mapSheetLearnEntity.setM1ModelUid(req.getModel1Uid()); + mapSheetLearnEntity.setM2ModelUid(req.getModel2Uid()); + 
mapSheetLearnEntity.setM3ModelUid(req.getModel3Uid()); + // mapSheetLearnRepository.save() + } + /****/ /** diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java index 7acb4c21..d803a756 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java @@ -1,11 +1,9 @@ package com.kamco.cd.kamcoback.postgres.core; -import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; -import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository; -import jakarta.persistence.EntityNotFoundException; import java.time.LocalDate; +import java.util.UUID; import lombok.RequiredArgsConstructor; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; @@ -26,14 +24,24 @@ public class ModelMngCoreService { searchReq, startDate, endDate, modelType, searchVal); } - public ApiResponseDto.ResponseObj removeModel(String modelVer) { + public void removeModel(UUID uuid) { + /* ModelMngEntity entity = modelMngRepository - .findByModelUid(modelVer) - .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다. 
ver: " + modelVer)); - + .findByModelUuid(uuid) + .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다.")); + */ // id 코드 deleted = true 업데이트 - entity.deleted(); - return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, ""); + // entity.deleted(); + + modelMngRepository.deleteByModelUuid(uuid); + } + + public void insertModel(ModelMngDto.AddReq addReq) { + + // ModelMngEntity addEntity = new ModelMngEntity(); + // addEntity.setModelType(addReq.getModelType()); + + modelMngRepository.insertModel(addReq); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java new file mode 100644 index 00000000..85c0612d --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java @@ -0,0 +1,54 @@ +package com.kamco.cd.kamcoback.postgres.core; + +import com.kamco.cd.kamcoback.postgres.repository.upload.UploadSessionRepository; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import java.util.UUID; +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Service; + +@Service +@RequiredArgsConstructor +public class UploadSessionCoreService { + + private final UploadSessionRepository uploadSessionRepository; + + public void createUploadSession(UploadDto.UploadAddReq addReq) { + /* + UUID newUuid = UUID.randomUUID(); + + UploadSessionEntity entity = new UploadSessionEntity(); + entity.setUploadId(addReq.getUploadId()); + entity.setDatasetId(addReq.getDatasetId()); + entity.setFileName(addReq.getFileName()); + entity.setFileSize(addReq.getFileSize()); + entity.setFinalPath(addReq.getFinalPath()); + entity.setStatus(addReq.getStatus()); + entity.setTempPath(addReq.getTempPath()); + entity.setChunkIndex(addReq.getChunkIndex()); + entity.setChunkTotalIndex(addReq.getChunkTotalIndex()); + entity.setUploadDivi(addReq.getUploadDivi()); + 
entity.setFileHash(addReq.getFileHash()); + entity.setUuid(newUuid); + + //System.out.println("======================"); + + UploadSessionEntity saved = uploadSessionRepository.save(entity); + + return String.valueOf(saved.getUuid()); + */ + + uploadSessionRepository.insertUploadSession(addReq); + } + + public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) { + return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi); + } + + public UploadDto.uploadDto findByUuid(UUID uuid) { + return uploadSessionRepository.findByUuid(uuid); + } + + public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) { + uploadSessionRepository.updateUploadSessionStatus(addReq); + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java index 9c06015f..d97ecf40 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java @@ -8,6 +8,7 @@ import jakarta.persistence.GenerationType; import jakarta.persistence.Id; import jakarta.persistence.Table; import java.time.ZonedDateTime; +import java.util.UUID; import lombok.Getter; import lombok.Setter; @@ -52,6 +53,9 @@ public class ModelMngEntity extends CommonDateEntity { @Column(name = "memo") private String memo; + @Column(name = "uuid") + private UUID uuid; + public void deleted() { this.deleted = true; } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java new file mode 100644 index 00000000..18a93105 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java @@ -0,0 +1,89 @@ +package com.kamco.cd.kamcoback.postgres.entity; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import 
jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Size; +import java.time.OffsetDateTime; +import java.util.UUID; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.annotations.ColumnDefault; + +@Getter +@Setter +@Entity +@Table(name = "tb_upload_session") +public class UploadSessionEntity { + + @Id + @Size(max = 100) + @Column(name = "upload_id", nullable = false, length = 100) + private String uploadId; + + @Column(name = "completed_dttm") + private OffsetDateTime completedDttm; + + @NotNull + @ColumnDefault("now()") + @Column(name = "created_dttm", nullable = false) + private OffsetDateTime createdDttm; + + @Column(name = "dataset_id") + private Long datasetId; + + @Column(name = "error_message", length = Integer.MAX_VALUE) + private String errorMessage; + + @Size(max = 255) + @Column(name = "file_name") + private String fileName; + + @ColumnDefault("0") + @Column(name = "file_size") + private Long fileSize; + + @Size(max = 500) + @Column(name = "final_path", length = 500) + private String finalPath; + + @Size(max = 20) + @Column(name = "status", length = 20) + private String status; + + @Size(max = 500) + @Column(name = "temp_path", length = 500) + private String tempPath; + + @Column(name = "chunk_total_index") + private Integer chunkTotalIndex; + + @NotNull + @ColumnDefault("now()") + @Column(name = "updated_dttm", nullable = false) + private OffsetDateTime updatedDttm; + + @Column(name = "chunk_index") + private Integer chunkIndex; + + @Size(max = 50) + @Column(name = "upload_divi", length = 50) + private String uploadDivi; + + @Size(max = 300) + @Column(name = "file_hash", length = 300) + private String fileHash; + + @Column(name = "total_chunks") + private Integer totalChunks; + + @Column(name = "uploaded_chunks") + private Integer uploadedChunks; + + @NotNull + @ColumnDefault("uuid_generate_v4()") + @Column(name = "uuid", nullable = false) + private UUID uuid; +} diff --git 
a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java index 57f28ff5..4530a9d7 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java @@ -57,7 +57,6 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { QLabelingAssignmentEntity.labelingAssignmentEntity; private final QMemberEntity memberEntity = QMemberEntity.memberEntity; - /** * 변화탐지 년도 셀렉트박스 조회 * @@ -81,8 +80,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { } /** - * 라벨링 작업관리 목록 조회 - * (복잡한 집계 쿼리로 인해 DTO 직접 반환) + * 라벨링 작업관리 목록 조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환) * * @param searchReq 검색 조건 * @return 라벨링 작업관리 목록 페이지 @@ -342,7 +340,9 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { mapSheetAnalInferenceEntity .uuid .eq(uuid) - .and(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))) + .and( + labelingAssignmentEntity.analUid.eq( + mapSheetAnalInferenceEntity.id))) .innerJoin(memberEntity) .on(whereSubBuilder) .where(whereBuilder) @@ -387,8 +387,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { } /** - * 작업배정 상세조회 - * (복잡한 집계 쿼리로 인해 DTO 직접 반환) + * 작업배정 상세조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환) * * @param uuid 작업배정 UUID * @return 작업배정 상세 정보 diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java index 5d34fdee..b84ea5d3 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java @@ -4,6 +4,7 @@ import 
com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import java.time.LocalDate; import java.util.Optional; +import java.util.UUID; import org.springframework.data.domain.Page; public interface ModelMngRepositoryCustom { @@ -16,4 +17,10 @@ public interface ModelMngRepositoryCustom { String searchVal); Optional findByModelUid(String modelVer); + + Optional findByModelUuid(UUID uuid); + + void insertModel(ModelMngDto.AddReq addReq); + + void deleteByModelUuid(UUID uuid); } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java index 18290f45..db15f3e4 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java @@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.NumberPath; import com.querydsl.core.types.dsl.StringExpression; import com.querydsl.jpa.impl.JPAQueryFactory; +import jakarta.validation.Valid; import java.math.BigDecimal; import java.time.LocalDate; import java.time.LocalDateTime; @@ -22,6 +23,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.UUID; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; @@ -67,8 +69,6 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport .select( Projections.constructor( ModelMngDto.ModelList.class, - Expressions.numberTemplate( - Integer.class, "row_number() over(order by {0} desc)", sortColumn), modelMngEntity.modelVer, modelMngEntity.fileName, modelMngEntity.modelType, @@ -80,7 +80,8 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport 
roundNumericToPercent(modelResultMetricEntity.loss), roundNumericToPercent(modelResultMetricEntity.iou), modelMngEntity.memo, - modelMngEntity.deleted)) + modelMngEntity.deleted, + modelMngEntity.uuid)) .from(modelMngEntity) .innerJoin(modelResultMetricEntity) .on(modelMngEntity.modelUid.eq(modelResultMetricEntity.modelUid)) @@ -116,6 +117,15 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport .fetchOne()); } + @Override + public Optional findByModelUuid(UUID uuid) { + + System.out.println("uuid == " + uuid); + + return Optional.ofNullable( + queryFactory.selectFrom(modelMngEntity).where(modelMngEntity.uuid.eq(uuid)).fetchOne()); + } + private BooleanExpression eventEndedAtBetween( LocalDate startDate, LocalDate endDate, String sortColumn) { if (Objects.isNull(startDate) || Objects.isNull(endDate)) { @@ -155,4 +165,36 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport private Expression roundNumericToPercent(NumberPath ratio) { return Expressions.numberTemplate(BigDecimal.class, "function('round', {0} * 100, 2)", ratio); } + + @Override + public void insertModel(@Valid ModelMngDto.AddReq addReq) { + long execCount = + queryFactory + .insert(modelMngEntity) + .columns( + modelMngEntity.modelVer, + modelMngEntity.modelType, + modelMngEntity.filePath, + modelMngEntity.fileName, + modelMngEntity.memo, + modelMngEntity.uuid) + .values( + addReq.getModelVer(), + addReq.getModelType(), + addReq.getFilePath(), + addReq.getFileName(), + addReq.getMemo(), + addReq.getUuid()) + .execute(); + } + + @Override + public void deleteByModelUuid(UUID uuid) { + long execCount = + queryFactory + .update(modelMngEntity) + .set(modelMngEntity.deleted, true) + .where(modelMngEntity.uuid.eq(uuid)) + .execute(); + } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java new file mode 100644 index 
// ===== src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java =====
package com.kamco.cd.kamcoback.postgres.repository.upload;

import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data repository for upload sessions; custom QueryDSL operations live in
 * {@link UploadSessionRepositoryCustom}.
 *
 * <p>NOTE(review): generic parameters appear stripped in this view — assumed
 * {@code JpaRepository<UploadSessionEntity, Long>}; confirm the entity's id type.
 */
public interface UploadSessionRepository
    extends JpaRepository<UploadSessionEntity, Long>, UploadSessionRepositoryCustom {}

// ===== src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java =====
package com.kamco.cd.kamcoback.postgres.repository.upload;

import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;

/** Custom QueryDSL operations for chunked-upload sessions. */
public interface UploadSessionRepositoryCustom {

  /** Inserts a new upload-session row from the request DTO. */
  void insertUploadSession(UploadDto.UploadAddReq addReq);

  /**
   * Returns one session for the given dataset id and upload division, or {@code null} when none
   * exists. With multiple matches the picked row is arbitrary (no ordering is applied).
   */
  UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);

  /** Returns the session with the given UUID, or {@code null} when none exists. */
  UploadDto.uploadDto findByUuid(UUID uuid);

  /** Updates chunk index and status of the session identified by {@code addReq.uploadId}. */
  void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
}

// ===== src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java =====
package com.kamco.cd.kamcoback.postgres.repository.upload;

import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity;

import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.querydsl.core.types.ConstructorExpression;
import com.querydsl.core.types.Projections;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.UUID;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;

/**
 * QueryDSL implementation of {@link UploadSessionRepositoryCustom}.
 *
 * <p>Cleanups vs. the original: removed the unused {@code NULL_STRING} expression and the unused
 * injected {@code EntityManager}, dropped ignored {@code execute()} result locals, factored the
 * duplicated 12-column DTO projection into one helper, and added the missing {@code @Override}.
 */
public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
    implements UploadSessionRepositoryCustom {

  private final JPAQueryFactory queryFactory;

  public UploadSessionRepositoryImpl(JPAQueryFactory queryFactory) {
    super(UploadSessionEntity.class);
    this.queryFactory = queryFactory;
  }

  /**
   * Shared entity-to-DTO projection used by every finder; kept in one place so the column order
   * cannot drift from the {@code UploadDto.uploadDto} all-args constructor.
   */
  private ConstructorExpression<UploadDto.uploadDto> uploadDtoProjection() {
    return Projections.constructor(
        UploadDto.uploadDto.class,
        uploadSessionEntity.uploadId,
        uploadSessionEntity.datasetId,
        uploadSessionEntity.fileName,
        uploadSessionEntity.fileSize,
        uploadSessionEntity.finalPath,
        uploadSessionEntity.uploadDivi,
        uploadSessionEntity.status,
        uploadSessionEntity.tempPath,
        uploadSessionEntity.chunkIndex,
        uploadSessionEntity.chunkTotalIndex,
        uploadSessionEntity.fileHash,
        uploadSessionEntity.uuid);
  }

  @Override
  public void insertUploadSession(UploadDto.UploadAddReq addReq) {
    queryFactory
        .insert(uploadSessionEntity)
        .columns(
            uploadSessionEntity.uploadId,
            uploadSessionEntity.datasetId,
            uploadSessionEntity.fileName,
            uploadSessionEntity.fileSize,
            uploadSessionEntity.finalPath,
            uploadSessionEntity.status,
            uploadSessionEntity.tempPath,
            uploadSessionEntity.chunkIndex,
            uploadSessionEntity.chunkTotalIndex,
            uploadSessionEntity.uploadDivi,
            uploadSessionEntity.fileHash,
            uploadSessionEntity.uuid)
        .values(
            addReq.getUploadId(),
            addReq.getDatasetId(),
            addReq.getFileName(),
            addReq.getFileSize(),
            addReq.getFinalPath(),
            addReq.getStatus(),
            addReq.getTempPath(),
            addReq.getChunkIndex(),
            addReq.getChunkTotalIndex(),
            addReq.getUploadDivi(),
            addReq.getFileHash(),
            addReq.getUuid())
        .execute();
  }

  @Override
  public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
    return queryFactory
        .select(uploadDtoProjection())
        .from(uploadSessionEntity)
        .where(
            uploadSessionEntity
                .datasetId
                .eq(datasetId)
                .and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
        .limit(1)
        .fetchOne();
  }

  @Override
  public UploadDto.uploadDto findByUuid(UUID uuid) {
    return queryFactory
        .select(uploadDtoProjection())
        .from(uploadSessionEntity)
        .where(uploadSessionEntity.uuid.eq(uuid))
        .limit(1)
        .fetchOne();
  }

  @Override
  public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
    queryFactory
        .update(uploadSessionEntity)
        .set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex())
        .set(uploadSessionEntity.status, addReq.getStatus())
        .where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
        .execute();
  }
}
// ===== src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java =====
package com.kamco.cd.kamcoback.upload;

import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.service.UploadService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

/**
 * Chunked large-file upload endpoints.
 *
 * <p>Cleanups vs. the original: removed {@code System.out.println} debugging, deleted
 * commented-out endpoints (init/status), and dropped the unused {@code sync-*} / {@code model-*}
 * {@code @Value} fields — reintroduce them if the model upload path is wired up.
 *
 * <p>NOTE(review): generic parameters appear stripped in this view — assumed
 * {@code ApiResponseDto<UploadDto.UploadRes>}; confirm against {@code ApiResponseDto}.
 */
@Tag(name = "파일 업로드", description = "대용량 파일 업로드 API")
@RestController
@RequestMapping("/api/upload")
@RequiredArgsConstructor
public class UploadApiController {

  /** Upload division marker for dataset files (the only division this controller serves). */
  private static final String UPLOAD_DIVI_DATASET = "dataset";

  private final UploadService uploadService;

  @Value("${file.dataset-dir}")
  private String datasetDir;

  @Value("${file.dataset-tmp-dir}")
  private String datasetTmpDir;

  /**
   * Receives one chunk of a dataset file. The client generates the UUID once per file and sends
   * it with every chunk; chunk indexes are 0-based and {@code chunkTotalIndex} is the LAST index.
   */
  @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
  @ApiResponses(
      value = {
        @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
        @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
        @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
  public ApiResponseDto<UploadDto.UploadRes> fileChunkUpload(
      @RequestParam("uuid") UUID uuid,
      @RequestParam("fileName") String fileName,
      @RequestParam("fileSize") long fileSize,
      @RequestParam("chunkIndex") Integer chunkIndex,
      @RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
      @RequestPart("chunkFile") MultipartFile chunkFile) {

    UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
    // TODO confirm: datasetId is hard-coded to 0 pending wiring to a real dataset key.
    upAddReqDto.setDatasetId(0L);
    upAddReqDto.setUuid(uuid);
    upAddReqDto.setFileName(fileName);
    upAddReqDto.setFileSize(fileSize);
    upAddReqDto.setChunkIndex(chunkIndex);
    upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
    upAddReqDto.setUploadDivi(UPLOAD_DIVI_DATASET);
    upAddReqDto.setFinalPath(datasetDir);
    upAddReqDto.setTempPath(datasetTmpDir);

    return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
  }

  /** Finalizes an upload: merges the stored chunks of the session identified by {@code uuid}. */
  @Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.")
  @ApiResponses(
      value = {
        @ApiResponse(responseCode = "200", description = "업로드 완료 성공", content = @Content),
        @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
        @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @PutMapping("/chunk-upload-complete/{uuid}")
  public ApiResponseDto<UploadDto.UploadRes> completeUpload(@PathVariable UUID uuid) {
    return ApiResponseDto.ok(uploadService.completeUpload(uuid));
  }
}
public static class InitReq { + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "총 청크 수", example = "100") + private Integer chunkTotalIndex; + + /* + @Schema( + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + private String fileHash; + */ + + @Schema(description = "업로드구분", example = "model") + private String uploadDivi; + } + + @Schema(name = "UploadAddReq", description = "업로드 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class UploadAddReq { + @Schema(description = "업로드 ID", example = "각데이터의 식별키") + private String uploadId; + + @Schema(description = "데이터식별키", example = "129227333") + private Long datasetId; + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "파일명", example = "data.zip") + private String finalPath; + + @Schema(description = "업로드구분", example = "dataset") + private String uploadDivi; + + @Schema(description = "상태", example = "UPLOADING") + private String status; + + @Schema(description = "임시저장경로") + private String tempPath; + + @Schema(description = "업로드 청크 Index", example = "50") + private Integer chunkIndex; + + @Schema(description = "총 청크 수", example = "100") + private Integer chunkTotalIndex; + + @Schema( + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + private String fileHash; + + @Schema(description = "uuid", example = "303d4e24-1726-4272-bbc7-01ab85692b80") + private UUID uuid; + } + + @Schema(name = "UploadCompleteReq", description = "업로드 완료 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class CompleteReq { + + @NotBlank(message = "업로드 ID는 필수입니다") + 
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123") + private String uploadId; + } + + @Schema(name = "UploadStatusReq", description = "업로드 상태 조회 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class StatusReq { + + @NotBlank(message = "업로드 ID는 필수입니다") + @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123") + private String uploadId; + } + + @Schema(name = "UploadStatus", description = "업로드 상태 정보") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class Status { + + @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123") + private String uploadId; + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "상태", example = "UPLOADING") + private String status; + + @Schema(description = "총 청크 수", example = "100") + private Integer totalChunks; + + @Schema(description = "업로드된 청크 수", example = "50") + private Integer uploadedChunks; + + @Schema(description = "진행률 (%)", example = "50.0") + private Double progress; + + @Schema(description = "에러 메시지", example = "") + private String errorMessage; + } + + @Schema(name = "UploadAddReq", description = "업로드 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class uploadDto { + @Schema(description = "업로드 ID", example = "각데이터의 식별키") + private String uploadId; + + @Schema(description = "데이터식별키", example = "129227333") + private Long datasetId; + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "파일명", example = "data.zip") + private String finalPath; + + @Schema(description = "업로드구분", example = "dataset") + private String uploadDivi; + + @Schema(description = "상태", example = "UPLOADING") 
+ private String status; + + @Schema(description = "임시저장경로") + private String tempPath; + + @Schema(description = "업로드 청크 Index", example = "50") + private Integer chunkIndex; + + @Schema(description = "총 청크 Index", example = "100") + private Integer chunkTotalIndex; + + @Schema( + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + private String fileHash; + + @Schema(description = "uuid") + private UUID uuid; + } + + @Schema(name = "UploadRes", description = "업로드 수행 후 리턴") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class UploadRes { + private String res; + private String resMsg; + private UUID uuid; + private String filePath; + private String fileName; + private int chunkIndex; + private int chunkTotalIndex; + + public double getUploadRate() { + if (this.chunkTotalIndex == 0) { + return 0.0; + } + return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0; + } + } + + @Schema(name = "DmlReturn", description = "수행 후 리턴") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class DmlReturn { + + private String flag; + private String message; + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java b/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java new file mode 100644 index 00000000..eae19b6e --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java @@ -0,0 +1,237 @@ +package com.kamco.cd.kamcoback.upload.service; + +import com.kamco.cd.kamcoback.common.enums.FileUploadStatus; +import com.kamco.cd.kamcoback.common.utils.FIleChecker; +import com.kamco.cd.kamcoback.postgres.core.UploadSessionCoreService; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.kamco.cd.kamcoback.upload.dto.UploadDto.DmlReturn; +import java.io.IOException; +import java.nio.channels.FileChannel; +import java.nio.file.Files; +import java.nio.file.Path; +import 
// ===== src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java =====
package com.kamco.cd.kamcoback.upload.service;

import com.kamco.cd.kamcoback.common.enums.FileUploadStatus;
import com.kamco.cd.kamcoback.common.utils.FIleChecker;
import com.kamco.cd.kamcoback.postgres.core.UploadSessionCoreService;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto.DmlReturn;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

/**
 * Chunked-upload workflow: per-chunk persistence, session bookkeeping, and final merge.
 *
 * <p>Bug fixes vs. the original:
 *
 * <ul>
 *   <li>{@code chunkIndex == chunkTotalIndex} compared boxed {@code Integer}s by reference —
 *       broken for values above the autobox cache (127); now compares primitives.
 *   <li>NPE when the last chunk arrived on a brand-new session ({@code dto} is null after
 *       creation, but {@code dto.getUploadId()} was called).
 *   <li>A failed chunk save set a fail result but still fell through to the merge; it now
 *       returns immediately.
 *   <li>{@code checkChunkFoler} called {@code setRes} twice (clobbering the flag with the
 *       message) instead of {@code setRes} + {@code setResMsg}.
 *   <li>Hard-coded {@code "MERGED"} / {@code "UPLOADING"} literals replaced with
 *       {@link FileUploadStatus}.
 * </ul>
 *
 * Unused {@code @Value} path fields were removed — all paths arrive in the request DTO.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class UploadService {

  private static final String RES_SUCCESS = "success";
  private static final String RES_FAIL = "fail";

  private final UploadSessionCoreService uploadSessionCoreService;

  /** Placeholder init hook; the session row is actually created on the first chunk. */
  @Transactional
  public DmlReturn initUpload(UploadDto.InitReq initReq) {
    return new DmlReturn(RES_SUCCESS, "UPLOAD CHUNK INIT");
  }

  /**
   * Stores one chunk; on the last chunk (index == total index, 0-based) merges all chunks into
   * the final file and advances the session status DONE → MERGED.
   *
   * @param upAddReqDto request carrying uuid, file name/size, chunk indexes and base paths
   * @param file the chunk payload
   * @return result DTO — res is "success", "fail", "duplicate" or "nosession"
   */
  @Transactional
  public UploadDto.UploadRes uploadChunk(UploadDto.UploadAddReq upAddReqDto, MultipartFile file) {

    UploadDto.UploadRes upRes = new UploadDto.UploadRes();

    UUID uuid = upAddReqDto.getUuid();
    // Per-upload working directories are namespaced by the client-supplied UUID.
    String tmpDataSetDir = upAddReqDto.getTempPath() + uuid;
    String fianlDir = upAddReqDto.getFinalPath() + uuid;
    String fileName = upAddReqDto.getFileName();
    int chunkIndex = upAddReqDto.getChunkIndex();
    int chunkTotalIndex = upAddReqDto.getChunkTotalIndex();

    upRes.setUuid(uuid);
    upRes.setFilePath(fianlDir);
    upRes.setFileName(fileName);

    if (upAddReqDto.getDatasetId() == null) {
      upAddReqDto.setDatasetId(0L);
    }
    upAddReqDto.setUploadId(String.valueOf(System.currentTimeMillis()));
    upAddReqDto.setStatus(FileUploadStatus.INIT.name());
    upAddReqDto.setTempPath(tmpDataSetDir);
    upAddReqDto.setFinalPath(fianlDir);

    // Create the session on the first chunk, or validate/refresh it on later chunks.
    UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
    if (!RES_SUCCESS.equals(upRes.getRes())) {
      return upRes;
    }

    upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());

    if (dto != null) {
      // Resume against the paths recorded when the session was created.
      tmpDataSetDir = dto.getTempPath();
      fianlDir = dto.getFinalPath();
    }

    if (!checkChunkFoler(upRes, tmpDataSetDir, fianlDir)) {
      return upRes;
    }

    if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
      upRes.setRes(RES_FAIL);
      upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 애러");
      // Bug fix: do not fall through to the merge after a failed save.
      return upRes;
    }

    // Primitive comparison (the original compared boxed Integers with ==).
    if (chunkIndex == chunkTotalIndex) {
      // Bug fix: dto is null when the very first chunk is also the last one
      // (single-chunk upload) — fall back to the id generated above.
      String sessionUploadId = (dto != null) ? dto.getUploadId() : upAddReqDto.getUploadId();
      upAddReqDto.setUploadId(sessionUploadId);
      upAddReqDto.setStatus(FileUploadStatus.DONE.name());
      uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);

      try {
        this.mergeChunks(tmpDataSetDir, fianlDir, fileName, chunkTotalIndex);

        upAddReqDto.setStatus(FileUploadStatus.MERGED.name());
        uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
      } catch (IOException e) {
        log.error("chunk merge failed: uuid={}, file={}", uuid, fileName, e);
        upRes.setRes(RES_FAIL);
        upRes.setResMsg("파일Chunk 병합(merge) 애러");
        return upRes;
      }
    }

    return upRes;
  }

  /**
   * Re-runs the merge for an existing session (recovery endpoint).
   *
   * @param uuid session identifier
   * @return result DTO; "nosession" when no session row exists (the original NPE'd here)
   */
  @Transactional
  public UploadDto.UploadRes completeUpload(UUID uuid) {

    UploadDto.UploadRes upRes = new UploadDto.UploadRes();
    upRes.setUuid(uuid);

    UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid);
    if (dto == null) {
      // Robustness fix: the original dereferenced dto without a null check.
      upRes.setRes("nosession");
      upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
      return upRes;
    }

    upRes.setRes(RES_SUCCESS);
    upRes.setResMsg("병합(merge) 정상처리되었습니다.");
    upRes.setFilePath(dto.getFinalPath());
    upRes.setFileName(dto.getFileName());
    upRes.setChunkIndex(dto.getChunkIndex());
    upRes.setChunkTotalIndex(dto.getChunkTotalIndex());

    try {
      this.mergeChunks(
          dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
    } catch (IOException e) {
      log.error("chunk merge failed: uuid={}", uuid, e);
      upRes.setRes(RES_FAIL);
      upRes.setResMsg("병합(merge) 애러");
    }

    return upRes;
  }

  /**
   * Ensures the temp and final directories exist.
   *
   * @return false (with res/resMsg populated) when either directory cannot be created
   */
  public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) {

    if (!FIleChecker.mkDir(tmpDataSetDir)) {
      // Bug fix: the original called setRes twice, overwriting the fail flag with the message.
      upRes.setRes(RES_FAIL);
      upRes.setResMsg("CHUNK 폴더 생성 ERROR");
      return false;
    }

    if (!FIleChecker.mkDir(fianlDir)) {
      upRes.setRes(RES_FAIL);
      upRes.setResMsg("업로드 완료 폴더 생성 ERROR");
      return false;
    }

    return true;
  }

  /**
   * Validates the session for the incoming chunk: chunk 0 must create a fresh session
   * (duplicate otherwise); later chunks must find an existing one (nosession otherwise).
   *
   * @return the existing session DTO, or null when the session was just created
   */
  public UploadDto.uploadDto checkUploadSession(
      UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {

    upRes.setRes(RES_SUCCESS);
    upRes.setResMsg("정상처리되었습니다.");

    UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid());

    if (upAddReqDto.getChunkIndex() == 0) {
      if (dto != null) {
        upRes.setRes("duplicate");
        upRes.setResMsg("이미 진행중인 업로드세션입니다.");
        return dto;
      }

      upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
      upRes.setUuid(upAddReqDto.getUuid());
      uploadSessionCoreService.createUploadSession(upAddReqDto);
    } else {
      if (dto == null) {
        upRes.setRes("nosession");
        upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
        return dto;
      }

      upAddReqDto.setStatus(FileUploadStatus.UPLOADING.name());
      upAddReqDto.setUploadId(dto.getUploadId());
      uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
    }

    if (dto != null) {
      upRes.setUuid(dto.getUuid());
    }

    upRes.setChunkIndex(upAddReqDto.getChunkIndex());
    upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());

    return dto;
  }

  /**
   * Concatenates chunk files 0..chunkTotalIndex (inclusive — the total is the LAST 0-based
   * index) into {@code finalDir/fileName}, deleting each chunk after it is copied and removing
   * the temp directory afterwards.
   *
   * @throws IOException when any chunk is missing or a copy fails
   */
  public void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex)
      throws IOException {

    Path outputPath = Paths.get(finalDir, fileName);
    try (FileChannel outChannel =
        FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
      for (int i = 0; i <= chunkTotalIndex; i++) {
        Path chunkPath = Paths.get(tmpDir, String.valueOf(i));

        try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
          // transferTo may move fewer bytes than requested; loop until done.
          long transferred = 0;
          long size = inChannel.size();
          while (transferred < size) {
            transferred += inChannel.transferTo(transferred, size - transferred, outChannel);
          }
        }
        // Delete immediately after merging to reclaim disk space.
        Files.delete(chunkPath);
      }
    }

    // Remove the now-empty temp directory.
    FIleChecker.deleteFolder(tmpDir);
  }
}

대용량 파일 청크 업로드 테스트

+ +* Chunk 테스트 사이즈 10M (10 * 1024 * 1024) - 성능에 따라 변경가능

+ +* 업로드 API선택

+ +

+* 파일첨부

+

+ +



+* 업로드시 업로드 이력을 추적하기 위해 UUID생성해서 전달(파일병합시 사용)(script 예제참고)

+UUID :

+ +* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)

+chunkIndex :

+chunkTotalIndex :

+ +* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)

+fileSize :

+ + + +

+* 진행율(%)

+
+

+* 결과메세지

+
+ + + +