From eb88717791065c0a59b379fca5de939b0bb34fdd Mon Sep 17 00:00:00 2001 From: Moon Date: Thu, 8 Jan 2026 18:35:19 +0900 Subject: [PATCH 1/8] =?UTF-8?q?chunk=EC=97=85=EB=A1=9C=EB=93=9C=20?= =?UTF-8?q?=EA=B3=B5=ED=86=B5,=20=EB=AA=A8=EB=8D=B8=EA=B4=80=EB=A6=AC=20?= =?UTF-8?q?=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../common/enums/FileUploadStatus.java | 28 ++ .../kamcoback/common/utils/FIleChecker.java | 18 ++ .../cd/kamcoback/config/SecurityConfig.java | 8 +- .../model/ModelMngApiController.java | 80 +++++- .../cd/kamcoback/model/dto/ModelMngDto.java | 14 +- .../model/service/ModelMngService.java | 52 +++- .../postgres/core/ModelMngCoreService.java | 23 +- .../core/UploadSessionCoreService.java | 56 ++++ .../postgres/entity/ModelMngEntity.java | 4 + .../postgres/entity/UploadSessionEntity.java | 90 +++++++ .../model/ModelMngRepositoryCustom.java | 8 + .../model/ModelMngRepositoryImpl.java | 52 +++- .../upload/UploadSessionRepository.java | 7 + .../upload/UploadSessionRepositoryCustom.java | 19 ++ .../upload/UploadSessionRepositoryImpl.java | 142 ++++++++++ .../kamcoback/upload/UploadApiController.java | 144 ++++++++++ .../cd/kamcoback/upload/dto/UploadDto.java | 224 ++++++++++++++++ .../upload/service/UploadService.java | 252 ++++++++++++++++++ .../resources/static/chunk_upload_test.html | 137 ++++++++++ 19 files changed, 1335 insertions(+), 23 deletions(-) create mode 100644 src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java create mode 100644 src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java create mode 100644 src/main/resources/static/chunk_upload_test.html diff --git a/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java b/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java new file mode 100644 index 00000000..d6c1b004 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/common/enums/FileUploadStatus.java @@ -0,0 +1,28 @@ +package com.kamco.cd.kamcoback.common.enums; + +import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose; +import com.kamco.cd.kamcoback.common.utils.enums.EnumType; +import lombok.AllArgsConstructor; +import lombok.Getter; + +@CodeExpose +@Getter +@AllArgsConstructor +public enum FileUploadStatus implements EnumType { + INIT("초기화"), + UPLOADING("업로드중"), + DONE("업로드완료"), + MERGED("병합완료"); + + private final String desc; + + @Override + public String getId() { + return name(); + } + + @Override + public String getText() { + return desc; + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java index ce622964..dd3c6ce8 100644 --- a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java +++ 
b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java @@ -29,6 +29,7 @@ import lombok.Getter; import org.apache.commons.io.FilenameUtils; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.gce.geotiff.GeoTiffReader; +import org.springframework.util.FileSystemUtils; import org.springframework.web.multipart.MultipartFile; public class FIleChecker { @@ -489,6 +490,23 @@ public class FIleChecker { return true; } + public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) { + File dest = new File(targetPath, String.valueOf(chunkIndex)); + + boolean fileUpload = true; + try { + mfile.transferTo(dest); + } catch (IOException e) { + return false; + } + + return true; + } + + public static boolean deleteFolder(String path) { + return FileSystemUtils.deleteRecursively(new File(path)); + } + public static boolean validationMultipart(MultipartFile mfile) { // 파일 유효성 검증 if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) { diff --git a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java index 8fa98624..f254918a 100644 --- a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java +++ b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java @@ -44,6 +44,8 @@ public class SecurityConfig { .authorizeHttpRequests( auth -> auth + // .requestMatchers("/chunk_upload_test.html").authenticated() + // 맵시트 영역 전체 허용 (우선순위 최상단) .requestMatchers("/api/mapsheet/**") .permitAll() @@ -77,7 +79,11 @@ public class SecurityConfig { "/api/auth/logout", "/swagger-ui/**", "/api/members/*/password", - "/v3/api-docs/**") + "/v3/api-docs/**", + "/chunk_upload_test.html", + "/api/model/file-chunk-upload", + "/api/upload/file-chunk-upload", + "/api/upload/chunk-upload-complete") .permitAll() // 로그인한 사용자만 가능 IAM .requestMatchers("/api/user/**", "/api/my/menus", "/api/code/**") diff --git a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java index 1dcb01b6..c6b89ae5 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java @@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils; import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.model.service.ModelMngService; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Schema; @@ -11,16 +12,20 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import jakarta.transaction.Transactional; +import jakarta.validation.Valid; import java.io.IOException; import java.time.LocalDate; +import java.util.UUID; import lombok.RequiredArgsConstructor; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.data.domain.Page; import org.springframework.http.MediaType; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import 
org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RequestPart; @@ -36,6 +41,27 @@ public class ModelMngApiController { private final ModelMngService modelMngService; + @Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + private String datasetDir; + + @Value("${file.dataset-tmp-dir}") + private String datasetTmpDir; + + @Value("${file.model-dir}") + private String modelDir; + + @Value("${file.model-tmp-dir}") + private String modelTmpDir; + @Autowired private ZipUtils zipUtils; @Operation(summary = "모델관리 목록") @@ -70,19 +96,59 @@ public class ModelMngApiController { @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) }) - @DeleteMapping("/{modelVer}") + @DeleteMapping("/{uuid}") public ApiResponseDto removeModel( @io.swagger.v3.oas.annotations.parameters.RequestBody( description = "모델 삭제 요청 정보", required = true) @PathVariable - String modelVer) { - return ApiResponseDto.okObject(modelMngService.removeModel(modelVer)); + String uuid) { + return ApiResponseDto.okObject(modelMngService.removeModel(UUID.fromString(uuid))); } - @Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드") - @PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) - public void upload(@RequestPart MultipartFile zipFilie) throws IOException { - zipUtils.processZip(zipFilie.getInputStream()); + + @Operation(summary = "모델등록") + @PostMapping + public ApiResponseDto ModelMgmt( + @RequestBody @Valid ModelMngDto.AddReq addReq) { + + return ApiResponseDto.ok(modelMngService.insertModel(addReq)); + } + + + @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.") + @ApiResponses( + value = { + @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + public ApiResponseDto fileChunkUpload( + @RequestParam("uuid") UUID uuid, + @RequestParam("fileName") String fileName, + @RequestParam("fileSize") long fileSize, + // @RequestParam("fileHash") String fileHash, + @RequestParam("chunkIndex") Integer chunkIndex, + @RequestParam("chunkTotalIndex") Integer chunkTotalIndex, + @RequestPart("chunkFile") MultipartFile chunkFile) { + + String uploadDivi = "model"; + + UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq(); + upAddReqDto.setDatasetId(0L); + upAddReqDto.setUuid(uuid); + upAddReqDto.setFileName(fileName); + upAddReqDto.setFileSize(fileSize); + upAddReqDto.setChunkIndex(chunkIndex); + upAddReqDto.setChunkTotalIndex(chunkTotalIndex); + upAddReqDto.setUploadDivi(uploadDivi); + upAddReqDto.setFinalPath(modelDir); + upAddReqDto.setTempPath(modelTmpDir); + + System.out.println("uuid === "+ uuid); + + return 
ApiResponseDto.ok(modelMngService.uploadChunkModelFile(upAddReqDto, chunkFile)); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java index ea9da363..27058f85 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java @@ -1,11 +1,13 @@ package com.kamco.cd.kamcoback.model.dto; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose; import com.kamco.cd.kamcoback.common.utils.enums.EnumType; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; import io.swagger.v3.oas.annotations.media.Schema; import java.math.BigDecimal; import java.time.ZonedDateTime; +import java.util.UUID; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; @@ -96,7 +98,6 @@ public class ModelMngDto { @AllArgsConstructor public static class ModelList { - private Integer rowNum; private String modelVer; private String fileName; private String modelType; @@ -108,6 +109,7 @@ public class ModelMngDto { private BigDecimal iou; private String memo; private Boolean deleted; + private UUID uuid; } @Schema(name = "ModelAddReq", description = "모델 등록 req") @@ -118,10 +120,14 @@ public class ModelMngDto { public static class AddReq { private String modelType; - private String dockerFileNm; private String modelVer; - private String hyperVer; - private String epochVer; + private String filePath; + private String fileName; + private String memo; + + @JsonIgnore + private UUID uuid; + } @Schema(name = "searchReq", description = "검색 요청") diff --git a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java index 02c7e518..d50239ed 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java @@ -3,10 +3,15 @@ package com.kamco.cd.kamcoback.model.service; import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.kamco.cd.kamcoback.upload.service.UploadService; import java.time.LocalDate; +import java.util.UUID; import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Value; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; +import org.springframework.web.multipart.MultipartFile; @Service @RequiredArgsConstructor @@ -14,6 +19,29 @@ public class ModelMngService { private final ModelMngCoreService modelMngCoreService; + private final UploadService uploadService; + + @Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + private String datasetDir; + + @Value("${file.dataset-tmp-dir}") + private String datasetTmpDir; + + @Value("${file.model-dir}") + private String modelDir; + + @Value("${file.model-tmp-dir}") + private String modelTmpDir; + public Page findModelMgmtList( ModelMngDto.searchReq searchReq, LocalDate startDate, @@ -24,7 +52,27 @@ public class ModelMngService { searchReq, startDate, endDate, modelType, searchVal); } - 
public ApiResponseDto.ResponseObj removeModel(String modelVer) { - return modelMngCoreService.removeModel(modelVer); + public ApiResponseDto.ResponseObj removeModel(UUID uuid) { + + modelMngCoreService.removeModel(uuid); + + return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "삭제되었습니다."); } + + + public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) { + UUID uuid = UUID.randomUUID(); + addReq.setUuid(uuid); + modelMngCoreService.insertModel(addReq); + return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "등록되었습니다."); + } + + public UploadDto.UploadRes uploadChunkModelFile(UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) + { + UploadDto.UploadRes upRes = uploadService.uploadChunk(upAddReqDto, chunkFile); + + return upRes; + } + + } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java index 7acb4c21..87fa15d1 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java @@ -6,6 +6,7 @@ import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository; import jakarta.persistence.EntityNotFoundException; import java.time.LocalDate; +import java.util.UUID; import lombok.RequiredArgsConstructor; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; @@ -26,14 +27,24 @@ public class ModelMngCoreService { searchReq, startDate, endDate, modelType, searchVal); } - public ApiResponseDto.ResponseObj removeModel(String modelVer) { + public void removeModel(UUID uuid) { + /* ModelMngEntity entity = modelMngRepository - .findByModelUid(modelVer) - .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다. 
ver: " + modelVer)); - + .findByModelUuid(uuid) + .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다.")); + */ // id 코드 deleted = true 업데이트 - entity.deleted(); - return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, ""); + //entity.deleted(); + + modelMngRepository.deleteByModelUuid(uuid); + } + + public void insertModel(ModelMngDto.AddReq addReq){ + + //ModelMngEntity addEntity = new ModelMngEntity(); + //addEntity.setModelType(addReq.getModelType()); + + modelMngRepository.insertModel(addReq); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java new file mode 100644 index 00000000..f963464c --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java @@ -0,0 +1,56 @@ +package com.kamco.cd.kamcoback.postgres.core; + +import com.kamco.cd.kamcoback.postgres.repository.upload.UploadSessionRepository; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import java.util.UUID; +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Service; + +@Service +@RequiredArgsConstructor +public class UploadSessionCoreService { + + private final UploadSessionRepository uploadSessionRepository; + + public void createUploadSession(UploadDto.UploadAddReq addReq) + { + /* + UUID newUuid = UUID.randomUUID(); + + UploadSessionEntity entity = new UploadSessionEntity(); + entity.setUploadId(addReq.getUploadId()); + entity.setDatasetId(addReq.getDatasetId()); + entity.setFileName(addReq.getFileName()); + entity.setFileSize(addReq.getFileSize()); + entity.setFinalPath(addReq.getFinalPath()); + entity.setStatus(addReq.getStatus()); + entity.setTempPath(addReq.getTempPath()); + entity.setChunkIndex(addReq.getChunkIndex()); + entity.setChunkTotalIndex(addReq.getChunkTotalIndex()); + entity.setUploadDivi(addReq.getUploadDivi()); + entity.setFileHash(addReq.getFileHash()); + entity.setUuid(newUuid); + + //System.out.println("======================"); + + UploadSessionEntity saved = uploadSessionRepository.save(entity); + + return String.valueOf(saved.getUuid()); + */ + + uploadSessionRepository.insertUploadSession(addReq); + + } + + public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi){ + return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi); + } + + public UploadDto.uploadDto findByUuid(UUID uuid){ + return uploadSessionRepository.findByUuid(uuid); + } + + public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq){ + uploadSessionRepository.updateUploadSessionStatus(addReq); + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java index 9c06015f..d97ecf40 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/ModelMngEntity.java @@ -8,6 +8,7 @@ import jakarta.persistence.GenerationType; import jakarta.persistence.Id; import jakarta.persistence.Table; import java.time.ZonedDateTime; +import java.util.UUID; import lombok.Getter; import lombok.Setter; @@ -52,6 +53,9 @@ public class ModelMngEntity extends CommonDateEntity { @Column(name = "memo") private String memo; + @Column(name = "uuid") + private UUID uuid; + public void deleted() { this.deleted = true; } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java 
b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java new file mode 100644 index 00000000..58646a83 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java @@ -0,0 +1,90 @@ +package com.kamco.cd.kamcoback.postgres.entity; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import jakarta.validation.constraints.Size; +import java.time.OffsetDateTime; +import java.util.UUID; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.annotations.ColumnDefault; + +@Getter +@Setter +@Entity +@Table(name = "tb_upload_session") +public class UploadSessionEntity { + + @Id + @Size(max = 100) + @Column(name = "upload_id", nullable = false, length = 100) + private String uploadId; + + @Column(name = "completed_dttm") + private OffsetDateTime completedDttm; + + @NotNull + @ColumnDefault("now()") + @Column(name = "created_dttm", nullable = false) + private OffsetDateTime createdDttm; + + @Column(name = "dataset_id") + private Long datasetId; + + @Column(name = "error_message", length = Integer.MAX_VALUE) + private String errorMessage; + + @Size(max = 255) + @Column(name = "file_name") + private String fileName; + + @ColumnDefault("0") + @Column(name = "file_size") + private Long fileSize; + + @Size(max = 500) + @Column(name = "final_path", length = 500) + private String finalPath; + + @Size(max = 20) + @Column(name = "status", length = 20) + private String status; + + @Size(max = 500) + @Column(name = "temp_path", length = 500) + private String tempPath; + + @Column(name = "chunk_total_index") + private Integer chunkTotalIndex; + + @NotNull + @ColumnDefault("now()") + @Column(name = "updated_dttm", nullable = false) + private OffsetDateTime updatedDttm; + + @Column(name = "chunk_index") + private Integer chunkIndex; + + @Size(max = 50) + @Column(name = "upload_divi", length = 50) + private String uploadDivi; + + @Size(max = 300) + @Column(name = "file_hash", length = 300) + private String fileHash; + + @Column(name = "total_chunks") + private Integer totalChunks; + + @Column(name = "uploaded_chunks") + private Integer uploadedChunks; + + @NotNull + @ColumnDefault("uuid_generate_v4()") + @Column(name = "uuid", nullable = false) + private UUID uuid; + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java index 5d34fdee..e82b4593 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java @@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.model.dto.ModelMngDto; import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import java.time.LocalDate; import java.util.Optional; +import java.util.UUID; import org.springframework.data.domain.Page; public interface ModelMngRepositoryCustom { @@ -16,4 +17,11 @@ public interface ModelMngRepositoryCustom { String searchVal); Optional findByModelUid(String modelVer); + + Optional findByModelUuid(UUID uuid); + + + void insertModel(ModelMngDto.AddReq addReq); + + void deleteByModelUuid(UUID uuid); } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java 
b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java index 18290f45..92cd36ef 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java @@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.NumberPath; import com.querydsl.core.types.dsl.StringExpression; import com.querydsl.jpa.impl.JPAQueryFactory; +import jakarta.validation.Valid; import java.math.BigDecimal; import java.time.LocalDate; import java.time.LocalDateTime; @@ -22,6 +23,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.UUID; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; @@ -67,8 +69,6 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport .select( Projections.constructor( ModelMngDto.ModelList.class, - Expressions.numberTemplate( - Integer.class, "row_number() over(order by {0} desc)", sortColumn), modelMngEntity.modelVer, modelMngEntity.fileName, modelMngEntity.modelType, @@ -80,7 +80,8 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport roundNumericToPercent(modelResultMetricEntity.loss), roundNumericToPercent(modelResultMetricEntity.iou), modelMngEntity.memo, - modelMngEntity.deleted)) + modelMngEntity.deleted, + modelMngEntity.uuid)) .from(modelMngEntity) .innerJoin(modelResultMetricEntity) .on(modelMngEntity.modelUid.eq(modelResultMetricEntity.modelUid)) @@ -116,6 +117,18 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport .fetchOne()); } + @Override + public Optional findByModelUuid(UUID uuid) { + + System.out.println("uuid == " + uuid); + + return Optional.ofNullable( + queryFactory + .selectFrom(modelMngEntity) + .where(modelMngEntity.uuid.eq(uuid)) + .fetchOne()); + } + private BooleanExpression eventEndedAtBetween( LocalDate startDate, LocalDate endDate, String sortColumn) { if (Objects.isNull(startDate) || Objects.isNull(endDate)) { @@ -155,4 +168,37 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport private Expression roundNumericToPercent(NumberPath ratio) { return Expressions.numberTemplate(BigDecimal.class, "function('round', {0} * 100, 2)", ratio); } + + @Override + public void insertModel(@Valid ModelMngDto.AddReq addReq) { + long execCount = + queryFactory + .insert(modelMngEntity) + .columns( + modelMngEntity.modelVer, + modelMngEntity.modelType, + modelMngEntity.filePath, + modelMngEntity.fileName, + modelMngEntity.memo, + modelMngEntity.uuid) + .values( + addReq.getModelVer(), + addReq.getModelType(), + addReq.getFilePath(), + addReq.getFileName(), + addReq.getMemo(), + addReq.getUuid()) + .execute(); + } + + @Override + public void deleteByModelUuid(UUID uuid){ + long execCount = + queryFactory + .update(modelMngEntity) + .set(modelMngEntity.deleted, true) + .where(modelMngEntity.uuid.eq(uuid)) + .execute(); + } + } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java new file mode 100644 index 00000000..6499f129 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepository.java @@ -0,0 +1,7 @@ +package 
com.kamco.cd.kamcoback.postgres.repository.upload; + +import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface UploadSessionRepository + extends JpaRepository, UploadSessionRepositoryCustom {} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java new file mode 100644 index 00000000..a6502e85 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java @@ -0,0 +1,19 @@ +package com.kamco.cd.kamcoback.postgres.repository.upload; + + + +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import java.util.UUID; + +public interface UploadSessionRepositoryCustom { + + + void insertUploadSession(UploadDto.UploadAddReq addReq); + + UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi); + + UploadDto.uploadDto findByUuid(UUID uuid); + void updateUploadSessionStatus(UploadDto.UploadAddReq addReq); + + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java new file mode 100644 index 00000000..e6914ee2 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java @@ -0,0 +1,142 @@ +package com.kamco.cd.kamcoback.postgres.repository.upload; + + +import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity; + +import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.querydsl.core.types.Projections; +import com.querydsl.core.types.dsl.Expressions; +import com.querydsl.core.types.dsl.StringExpression; +import com.querydsl.jpa.impl.JPAQueryFactory; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import java.util.UUID; +import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport; + +public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport + implements UploadSessionRepositoryCustom { + + private final JPAQueryFactory queryFactory; + private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)"); + + @PersistenceContext private EntityManager em; + + public UploadSessionRepositoryImpl(JPAQueryFactory queryFactory) { + super(UploadSessionEntity.class); + this.queryFactory = queryFactory; + } + + @Override + public void insertUploadSession(UploadDto.UploadAddReq addReq) { + long execCnt = + queryFactory + .insert(uploadSessionEntity) + .columns( + uploadSessionEntity.uploadId, + uploadSessionEntity.datasetId, + uploadSessionEntity.fileName, + uploadSessionEntity.fileSize, + uploadSessionEntity.finalPath, + uploadSessionEntity.status, + uploadSessionEntity.tempPath, + uploadSessionEntity.chunkIndex, + uploadSessionEntity.chunkTotalIndex, + uploadSessionEntity.uploadDivi, + uploadSessionEntity.fileHash, + uploadSessionEntity.uuid + ) + .values( + addReq.getUploadId(), + addReq.getDatasetId(), + addReq.getFileName(), + addReq.getFileSize(), + addReq.getFinalPath(), + addReq.getStatus(), + addReq.getTempPath(), + addReq.getChunkIndex(), + addReq.getChunkTotalIndex(), + addReq.getUploadDivi(), + addReq.getFileHash(), + addReq.getUuid() + ) + .execute(); + + + } + + + 
@Override + public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) { + + UploadDto.uploadDto foundContent = + queryFactory + .select( + Projections.constructor( + UploadDto.uploadDto.class, + uploadSessionEntity.uploadId, + uploadSessionEntity.datasetId, + uploadSessionEntity.fileName, + uploadSessionEntity.fileSize, + uploadSessionEntity.finalPath, + uploadSessionEntity.uploadDivi, + uploadSessionEntity.status, + uploadSessionEntity.tempPath, + uploadSessionEntity.chunkIndex, + uploadSessionEntity.chunkTotalIndex, + uploadSessionEntity.fileHash, + uploadSessionEntity.uuid + )) + .from(uploadSessionEntity) + .where(uploadSessionEntity.datasetId.eq(datasetId) + .and(uploadSessionEntity.uploadDivi.eq(uploadDivi))) + .limit(1) + .fetchOne(); + + return foundContent; + + } + + + @Override + public UploadDto.uploadDto findByUuid(UUID uuid) { + + UploadDto.uploadDto foundContent = + queryFactory + .select( + Projections.constructor( + UploadDto.uploadDto.class, + uploadSessionEntity.uploadId, + uploadSessionEntity.datasetId, + uploadSessionEntity.fileName, + uploadSessionEntity.fileSize, + uploadSessionEntity.finalPath, + uploadSessionEntity.uploadDivi, + uploadSessionEntity.status, + uploadSessionEntity.tempPath, + uploadSessionEntity.chunkIndex, + uploadSessionEntity.chunkTotalIndex, + uploadSessionEntity.fileHash, + uploadSessionEntity.uuid + )) + .from(uploadSessionEntity) + .where(uploadSessionEntity.uuid.eq(uuid)) + .limit(1) + .fetchOne(); + + return foundContent; + + } + + + public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq){ + long fileCount = + queryFactory + .update(uploadSessionEntity) + .set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex()) + .set(uploadSessionEntity.status, addReq.getStatus()) + .where(uploadSessionEntity.uploadId.eq(addReq.getUploadId())) + .execute(); + } + +} diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java b/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java new file mode 100644 index 00000000..ceae6eb9 --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java @@ -0,0 +1,144 @@ +package com.kamco.cd.kamcoback.upload; + +import com.kamco.cd.kamcoback.config.api.ApiResponseDto; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.kamco.cd.kamcoback.upload.service.UploadService; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.UUID; +import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RequestPart; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +@Tag(name = "파일 업로드", description = "대용량 파일 업로드 API") +@RestController +@RequestMapping("/api/upload") +@RequiredArgsConstructor +public class UploadApiController { + + private final UploadService uploadService; + + 
@Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + private String datasetDir; + + @Value("${file.dataset-tmp-dir}") + private String datasetTmpDir; + + @Value("${file.model-dir}") + private String modelDir; + + @Value("${file.model-tmp-dir}") + private String modelTmpDir; + + /* + @Operation(summary = "데이터셋 대용량 업로드 세션 시작", description = "데이터셋 대용량 파일 업로드 세션을 시작합니다.") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "세션 생성 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = UploadDto.InitRes.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PostMapping("/chunk-upload-init") + public ApiResponseDto initUpload( + @RequestBody @Valid UploadDto.InitReq initReq) { + return ApiResponseDto.createOK(uploadService.initUpload(initReq)); + } + */ + + @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.") + @ApiResponses( + value = { + @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + public ApiResponseDto fileChunkUpload( + @RequestParam("uuid") UUID uuid, + @RequestParam("fileName") String fileName, + @RequestParam("fileSize") long fileSize, + // @RequestParam("fileHash") String fileHash, + @RequestParam("chunkIndex") Integer chunkIndex, + @RequestParam("chunkTotalIndex") Integer chunkTotalIndex, + @RequestPart("chunkFile") MultipartFile chunkFile) { + + String uploadDivi = "dataset"; + + UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq(); + upAddReqDto.setDatasetId(0L); + upAddReqDto.setUuid(uuid); + upAddReqDto.setFileName(fileName); + upAddReqDto.setFileSize(fileSize); + upAddReqDto.setChunkIndex(chunkIndex); + upAddReqDto.setChunkTotalIndex(chunkTotalIndex); + upAddReqDto.setUploadDivi(uploadDivi); + upAddReqDto.setFinalPath(datasetDir); + upAddReqDto.setTempPath(datasetTmpDir); + + System.out.println("uuid === "+ uuid); + + return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile)); + } + + @Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.") + @ApiResponses( + value = { + @ApiResponse(responseCode = "200", description = "업로드 완료 성공", content = @Content), + @ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content), + @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PutMapping("/chunk-upload-complete/{uuid}") + public ApiResponseDto completeUpload( + @PathVariable UUID uuid) { + return ApiResponseDto.ok(uploadService.completeUpload(uuid)); + } + + /* + @Operation(summary = "업로드 상태 조회", description = "업로드 진행 상태를 조회합니다.") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "조회 성공", + content = + @Content( + mediaType = "application/json", + schema = 
@Schema(implementation = UploadDto.Status.class))), + @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PostMapping("/status") + public ApiResponseDto getUploadStatus( + @RequestBody @Valid UploadDto.StatusReq statusReq) { + return ApiResponseDto.ok(uploadService.getUploadStatus(statusReq)); + } + */ +} diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java b/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java new file mode 100644 index 00000000..7096c87c --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java @@ -0,0 +1,224 @@ +package com.kamco.cd.kamcoback.upload.dto; + +import io.swagger.v3.oas.annotations.media.Schema; +import jakarta.validation.constraints.NotBlank; +import java.util.UUID; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +public class UploadDto { + + @Schema(name = "InitReq", description = "업로드(Chunk) 세션 초기화") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class InitReq { + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "총 청크 수", example = "100") + private Integer chunkTotalIndex; + + /* + @Schema( + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + private String fileHash; + */ + + @Schema(description = "업로드구분", example = "model") + private String uploadDivi; + } + + @Schema(name = "UploadAddReq", description = "업로드 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class UploadAddReq { + @Schema(description = "업로드 ID", example = "각데이터의 식별키") + private String uploadId; + + @Schema(description = "데이터식별키", example = "129227333") + private Long datasetId; + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "파일명", example = "data.zip") + private String finalPath; + + @Schema(description = "업로드구분", example = "dataset") + private String uploadDivi; + + @Schema(description = "상태", example = "UPLOADING") + private String status; + + @Schema(description = "임시저장경로") + private String tempPath; + + @Schema(description = "업로드 청크 Index", example = "50") + private Integer chunkIndex; + + @Schema(description = "총 청크 수", example = "100") + private Integer chunkTotalIndex; + + @Schema( + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + private String fileHash; + + @Schema(description = "uuid", example = "303d4e24-1726-4272-bbc7-01ab85692b80") + private UUID uuid; + } + + @Schema(name = "UploadCompleteReq", description = "업로드 완료 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class CompleteReq { + + @NotBlank(message = "업로드 ID는 필수입니다") + @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123") + private String uploadId; + } + + @Schema(name = "UploadStatusReq", description = "업로드 상태 조회 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class StatusReq { + + @NotBlank(message = "업로드 ID는 필수입니다") + @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123") + private 
String uploadId; + } + + @Schema(name = "UploadStatus", description = "업로드 상태 정보") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class Status { + + @Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123") + private String uploadId; + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "상태", example = "UPLOADING") + private String status; + + @Schema(description = "총 청크 수", example = "100") + private Integer totalChunks; + + @Schema(description = "업로드된 청크 수", example = "50") + private Integer uploadedChunks; + + @Schema(description = "진행률 (%)", example = "50.0") + private Double progress; + + @Schema(description = "에러 메시지", example = "") + private String errorMessage; + } + + @Schema(name = "UploadAddReq", description = "업로드 요청") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class uploadDto { + @Schema(description = "업로드 ID", example = "각데이터의 식별키") + private String uploadId; + + @Schema(description = "데이터식별키", example = "129227333") + private Long datasetId; + + @Schema(description = "파일명", example = "data.zip") + private String fileName; + + + @Schema(description = "파일 크기 (bytes)", example = "10737418240") + private Long fileSize; + + @Schema(description = "파일명", example = "data.zip") + private String finalPath; + + @Schema(description = "업로드구분", example = "dataset") + private String uploadDivi; + + @Schema(description = "상태", example = "UPLOADING") + private String status; + + @Schema(description = "임시저장경로") + private String tempPath; + + @Schema(description = "업로드 청크 Index", example = "50") + private Integer chunkIndex; + + @Schema(description = "총 청크 Index", example = "100") + private Integer chunkTotalIndex; + + + @Schema( + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + private String fileHash; + + @Schema(description = "uuid") + private UUID uuid; + + } + + @Schema(name = "UploadRes", description = "업로드 수행 후 리턴") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class UploadRes { + private String res; + private String resMsg; + private UUID uuid; + private String filePath; + private String fileName; + private int chunkIndex; + private int chunkTotalIndex; + + public double getUploadRate() { + if (this.chunkTotalIndex == 0) { + return 0.0; + } + return (double) (this.chunkIndex+1) / (this.chunkTotalIndex+1) * 100.0; + } + + } + + + + @Schema(name = "DmlReturn", description = "수행 후 리턴") + @Getter + @Setter + @NoArgsConstructor + @AllArgsConstructor + public static class DmlReturn { + + private String flag; + private String message; + } +} diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java b/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java new file mode 100644 index 00000000..de6c3dae --- /dev/null +++ b/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java @@ -0,0 +1,252 @@ +package com.kamco.cd.kamcoback.upload.service; + +import com.kamco.cd.kamcoback.common.enums.FileUploadStatus; +import com.kamco.cd.kamcoback.common.utils.FIleChecker; +import com.kamco.cd.kamcoback.postgres.core.UploadSessionCoreService; +import com.kamco.cd.kamcoback.upload.dto.UploadDto; +import com.kamco.cd.kamcoback.upload.dto.UploadDto.DmlReturn; +import java.io.IOException; +import java.nio.channels.FileChannel; +import 
java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.util.UUID; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.multipart.MultipartFile; + +@Slf4j +@Service +@RequiredArgsConstructor +public class UploadService { + + private final UploadSessionCoreService uploadSessionCoreService; + + @Value("${file.sync-root-dir}") + private String syncRootDir; + + @Value("${file.sync-tmp-dir}") + private String syncTmpDir; + + @Value("${file.sync-file-extention}") + private String syncFileExtention; + + @Value("${file.dataset-dir}") + private String datasetDir; + + @Value("${file.dataset-tmp-dir}") + private String datasetTmpDir; + + @Value("${file.model-dir}") + private String modelDir; + + @Value("${file.model-tmp-dir}") + private String modelTmpDir; + + @Transactional + public DmlReturn initUpload(UploadDto.InitReq initReq) { + + return new DmlReturn("success", "UPLOAD CHUNK INIT"); + } + + @Transactional + public UploadDto.UploadRes uploadChunk(UploadDto.UploadAddReq upAddReqDto, MultipartFile file) { + + UploadDto.UploadRes upRes = new UploadDto.UploadRes(); + + long datasetId = 0; + if( upAddReqDto.getDatasetId() != null )datasetId = upAddReqDto.getDatasetId(); + String uploadId = System.currentTimeMillis()+""; + //UUID uuid = UUID.randomUUID(); + UUID uuid = upAddReqDto.getUuid(); + String tmpDataSetDir = upAddReqDto.getTempPath()+uuid; + String fianlDir = upAddReqDto.getFinalPath()+uuid; + String uploadDivi = upAddReqDto.getUploadDivi(); + //String fileName = file.getOriginalFilename(); + String fileName = upAddReqDto.getFileName(); + Integer chunkIndex = upAddReqDto.getChunkIndex(); + Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex(); + String status = FileUploadStatus.INIT.name(); + + upRes.setUuid(uuid); + upRes.setFilePath(fianlDir); + upRes.setFileName(fileName); + + upAddReqDto.setUuid(uuid); + upAddReqDto.setUploadId(uploadId); + upAddReqDto.setStatus(status); + upAddReqDto.setFileName(fileName); + upAddReqDto.setTempPath(tmpDataSetDir); + upAddReqDto.setFinalPath(fianlDir); + + //세션 신규,중복체크(초기화 포함) + UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes); + if( !upRes.getRes().equals("success") )return upRes; + + + status = FileUploadStatus.UPLOADING.name(); + upAddReqDto.setStatus(status); + + if( dto != null ) + { + tmpDataSetDir = dto.getTempPath(); + fianlDir = dto.getFinalPath(); + } + + //폴더 생성 및 체크 + if( ! checkChunkFoler(upRes, tmpDataSetDir, fianlDir) )return upRes; + + //chunk저장하기 + if( ! 
FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex ) ) + { + upRes.setRes("fail"); + upRes.setResMsg("chunkIndex:"+chunkIndex+" 업로드 애러"); + } + + if( chunkIndex == chunkTotalIndex ) { + + upAddReqDto.setUploadId(dto.getUploadId()); + upAddReqDto.setStatus(FileUploadStatus.DONE.name()); + uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto); + + + try { + this.mergeChunks(tmpDataSetDir, fianlDir, fileName, chunkTotalIndex); + + upAddReqDto.setUploadId(dto.getUploadId()); + upAddReqDto.setStatus("MERGED"); + uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto); + + } catch (IOException e) { + //throw new RuntimeException(e); + upRes.setRes("fail"); + upRes.setResMsg("파일Chunk 병합(merge) 애러"); + return upRes; + } + + + + } + + return upRes; + } + + @Transactional + public UploadDto.UploadRes completeUpload(UUID uuid) { + + UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid); + UploadDto.UploadRes upRes = new UploadDto.UploadRes(); + upRes.setRes("success"); + upRes.setResMsg("병합(merge) 정상처리되었습니다."); + upRes.setUuid(uuid); + upRes.setFilePath(dto.getFinalPath()); + upRes.setFileName(dto.getFileName()); + upRes.setChunkIndex(dto.getChunkIndex()); + upRes.setChunkTotalIndex(dto.getChunkTotalIndex()); + + try { + this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex()); + } catch (IOException e) { + + upRes.setRes("fail"); + upRes.setResMsg("병합(merge) 애러"); + + return upRes; + } + + return upRes; + + } + + public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) + { + + if( ! FIleChecker.mkDir(tmpDataSetDir) ) + { + upRes.setRes("fail"); + upRes.setRes("CHUNK 폴더 생성 ERROR"); + return false; + } + + if( ! FIleChecker.mkDir(fianlDir) ) + { + upRes.setRes("fail"); + upRes.setRes("업로드 완료 폴더 생성 ERROR"); + return false; + } + + return true; + } + + + public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) { + + upRes.setRes("success"); + upRes.setResMsg("정상처리되었습니다."); + + UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid()); + + if( upAddReqDto.getChunkIndex() == 0 ) { + if( dto != null ) + { + upRes.setRes("duplicate"); + upRes.setResMsg("이미 진행중인 업로드세션입니다."); + return dto; + } + + upAddReqDto.setStatus("UPLOADING"); + upRes.setUuid( upAddReqDto.getUuid() ); + uploadSessionCoreService.createUploadSession(upAddReqDto); + } + else { + if( dto == null ){ + upRes.setRes("nosession"); + upRes.setResMsg("업로드 세션이 존재하지 않습니다."); + return dto; + } + + upAddReqDto.setStatus("UPLOADING"); + upAddReqDto.setUploadId(dto.getUploadId()); + uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto); + } + + if( dto != null )upRes.setUuid( dto.getUuid() ); + + upRes.setChunkIndex(upAddReqDto.getChunkIndex()); + upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex()); + + + return dto; + + } + + public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex) throws IOException { + + Path outputPath = Paths.get(fianlDir, fileName); + try (FileChannel outChannel = FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) { + for (int i = 0; i <= chunkTotalIndex; i++) { + Path chunkPath = Paths.get(tmpDir, i+""); + + try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) { + long transferred = 0; + long size = inChannel.size(); + while (transferred < size) { + transferred += inChannel.transferTo(transferred, size - 
transferred, outChannel); + } + } + // 병합 후 즉시 삭제하여 디스크 공간 확보 + Files.delete(chunkPath); + } + } + + //병합후 임시 폴더 삭제 + FIleChecker.deleteFolder(tmpDir); + + } + + +} diff --git a/src/main/resources/static/chunk_upload_test.html b/src/main/resources/static/chunk_upload_test.html new file mode 100644 index 00000000..e05993e7 --- /dev/null +++ b/src/main/resources/static/chunk_upload_test.html @@ -0,0 +1,137 @@ + + + + + Chunk Upload Test + + +

대용량 파일 청크 업로드 테스트

+ +* Chunk 테스트 사이즈 10M (10 * 1024 * 1024) - 성능에 따라 변경가능

+ +* 업로드 API선택

+ +

+* 파일첨부

+

+ +



+* 업로드시 업로드 이력을 추적하기 위해 UUID생성해서 전달(파일병합시 사용)(script 예제참고)

+UUID :

+ +* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)

+chunkIndex :

+chunkTotalIndex :

+ +* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)

+fileSize :

+ + + +

+* 진행율(%)

+
+

+* 결과메세지

+
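Since the page's inline script did not survive, the request flow it exercises is sketched here as a plain Java client instead. The endpoint paths (/api/upload/file-chunk-upload, /api/upload/chunk-upload-complete/{uuid}), the multipart field names (uuid, fileName, fileSize, chunkIndex, chunkTotalIndex, chunkFile), the 10 MB chunk size, and the convention that chunkTotalIndex is the 0-based index of the last chunk all come from this patch; the class name, the use of Spring's RestTemplate, the base URL, and the sample file path are assumptions for illustration only, not part of the change set.

// Hypothetical client sketch (not part of the patch): drives the chunk-upload API end to end.
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.UUID;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class ChunkUploadClientSketch {

    // 10 MB, matching the size suggested on the test page
    private static final long CHUNK_SIZE = 10L * 1024 * 1024;

    public static void main(String[] args) throws IOException {
        String baseUrl = "http://localhost:8080";   // assumption: local dev server
        String path = "data.zip";                   // assumption: sample file to upload
        RestTemplate rest = new RestTemplate();

        try (RandomAccessFile file = new RandomAccessFile(path, "r")) {
            long fileSize = file.length();
            // Client-generated UUID, reused for every chunk and for the final merge call
            UUID uuid = UUID.randomUUID();
            // 0-based index of the last chunk, as expected by UploadService (assumes fileSize > 0)
            int chunkTotalIndex = (int) ((fileSize - 1) / CHUNK_SIZE);

            for (int chunkIndex = 0; chunkIndex <= chunkTotalIndex; chunkIndex++) {
                byte[] buf = new byte[(int) Math.min(CHUNK_SIZE, fileSize - chunkIndex * CHUNK_SIZE)];
                file.seek(chunkIndex * CHUNK_SIZE);
                file.readFully(buf);

                MultiValueMap<String, Object> form = new LinkedMultiValueMap<>();
                form.add("uuid", uuid.toString());
                form.add("fileName", "data.zip");
                form.add("fileSize", String.valueOf(fileSize));
                form.add("chunkIndex", String.valueOf(chunkIndex));
                form.add("chunkTotalIndex", String.valueOf(chunkTotalIndex));
                // The part name "chunkFile" matches @RequestPart("chunkFile") in the controllers
                form.add("chunkFile", new ByteArrayResource(buf) {
                    @Override
                    public String getFilename() {
                        return "chunk";
                    }
                });

                HttpHeaders headers = new HttpHeaders();
                headers.setContentType(MediaType.MULTIPART_FORM_DATA);
                rest.postForObject(
                    baseUrl + "/api/upload/file-chunk-upload",
                    new HttpEntity<>(form, headers),
                    String.class);
            }

            // Explicit merge request; in the patch, UploadService already merges once the
            // last chunk (chunkIndex == chunkTotalIndex) arrives, and this endpoint exposes
            // the same merge step separately.
            rest.put(baseUrl + "/api/upload/chunk-upload-complete/{uuid}", null, uuid);
        }
    }
}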
+ + + + From 0a311b09a85b3d2b7461850cb93dc336767a6a23 Mon Sep 17 00:00:00 2001 From: Moon Date: Thu, 8 Jan 2026 18:41:32 +0900 Subject: [PATCH 2/8] =?UTF-8?q?=EA=B2=BD=EB=A1=9C=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/resources/application.yml | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index a5036d99..ef668085 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -66,7 +66,15 @@ management: file: - #sync-root-dir: D:/app/original-images/ - sync-root-dir: /app/original-images/ - sync-tmp-dir: ${file.sync-root-dir}/tmp + sync-root-dir: D:/app/original-images/ + #sync-root-dir: /app/original-images/ + sync-tmp-dir: ${file.sync-root-dir}tmp/ sync-file-extention: tfw,tif + + #dataset-dir: D:/app/dataset/ + dataset-dir: /app/dataset/ + dataset-tmp-dir: ${file.dataset-dir}tmp/ + + #model-dir: D:/app/model/ + dataset-dir: /app/model/ + model-tmp-dir: ${file.model-dir}tmp/ From ecf9b8a24fe1e6dda32c91587744c1f835f3f6c5 Mon Sep 17 00:00:00 2001 From: Moon Date: Thu, 8 Jan 2026 18:43:40 +0900 Subject: [PATCH 3/8] =?UTF-8?q?=EC=97=85=EB=A1=9C=EB=93=9C=20=EA=B4=80?= =?UTF-8?q?=EB=A0=A8=20=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../kamcoback/common/utils/FIleChecker.java | 3 +- .../cd/kamcoback/config/SecurityConfig.java | 2 +- .../model/ModelMngApiController.java | 33 ++-- .../cd/kamcoback/model/dto/ModelMngDto.java | 4 +- .../model/service/ModelMngService.java | 7 +- .../postgres/core/ModelMngCoreService.java | 11 +- .../core/UploadSessionCoreService.java | 10 +- .../postgres/entity/UploadSessionEntity.java | 1 - .../model/ModelMngRepositoryCustom.java | 1 - .../model/ModelMngRepositoryImpl.java | 52 +++--- .../upload/UploadSessionRepositoryCustom.java | 6 +- .../upload/UploadSessionRepositoryImpl.java | 170 +++++++++--------- .../kamcoback/upload/UploadApiController.java | 19 +- .../cd/kamcoback/upload/dto/UploadDto.java | 16 +- .../upload/service/UploadService.java | 89 ++++----- 15 files changed, 185 insertions(+), 239 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java index dd3c6ce8..21f0f669 100644 --- a/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java +++ b/src/main/java/com/kamco/cd/kamcoback/common/utils/FIleChecker.java @@ -490,7 +490,8 @@ public class FIleChecker { return true; } - public static boolean multipartChunkSaveTo(MultipartFile mfile, String targetPath, int chunkIndex) { + public static boolean multipartChunkSaveTo( + MultipartFile mfile, String targetPath, int chunkIndex) { File dest = new File(targetPath, String.valueOf(chunkIndex)); boolean fileUpload = true; diff --git a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java index f254918a..6977cf0f 100644 --- a/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java +++ b/src/main/java/com/kamco/cd/kamcoback/config/SecurityConfig.java @@ -44,7 +44,7 @@ public class SecurityConfig { .authorizeHttpRequests( auth -> auth - // .requestMatchers("/chunk_upload_test.html").authenticated() + // .requestMatchers("/chunk_upload_test.html").authenticated() // 맵시트 영역 전체 허용 (우선순위 최상단) .requestMatchers("/api/mapsheet/**") diff --git 
a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java index c6b89ae5..fe78d3a4 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java @@ -13,7 +13,6 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import jakarta.transaction.Transactional; import jakarta.validation.Valid; -import java.io.IOException; import java.time.LocalDate; import java.util.UUID; import lombok.RequiredArgsConstructor; @@ -106,33 +105,31 @@ public class ModelMngApiController { return ApiResponseDto.okObject(modelMngService.removeModel(UUID.fromString(uuid))); } - @Operation(summary = "모델등록") @PostMapping public ApiResponseDto ModelMgmt( - @RequestBody @Valid ModelMngDto.AddReq addReq) { + @RequestBody @Valid ModelMngDto.AddReq addReq) { return ApiResponseDto.ok(modelMngService.insertModel(addReq)); } - @Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.") @ApiResponses( - value = { - @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content), - @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), - @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) public ApiResponseDto fileChunkUpload( - @RequestParam("uuid") UUID uuid, - @RequestParam("fileName") String fileName, - @RequestParam("fileSize") long fileSize, - // @RequestParam("fileHash") String fileHash, - @RequestParam("chunkIndex") Integer chunkIndex, - @RequestParam("chunkTotalIndex") Integer chunkTotalIndex, - @RequestPart("chunkFile") MultipartFile chunkFile) { + @RequestParam("uuid") UUID uuid, + @RequestParam("fileName") String fileName, + @RequestParam("fileSize") long fileSize, + // @RequestParam("fileHash") String fileHash, + @RequestParam("chunkIndex") Integer chunkIndex, + @RequestParam("chunkTotalIndex") Integer chunkTotalIndex, + @RequestPart("chunkFile") MultipartFile chunkFile) { String uploadDivi = "model"; @@ -147,7 +144,7 @@ public class ModelMngApiController { upAddReqDto.setFinalPath(modelDir); upAddReqDto.setTempPath(modelTmpDir); - System.out.println("uuid === "+ uuid); + System.out.println("uuid === " + uuid); return ApiResponseDto.ok(modelMngService.uploadChunkModelFile(upAddReqDto, chunkFile)); } diff --git a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java index 27058f85..96e1ba56 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/dto/ModelMngDto.java @@ -125,9 +125,7 @@ public class ModelMngDto { private String fileName; private String memo; - @JsonIgnore - private UUID uuid; - + @JsonIgnore private UUID uuid; } @Schema(name = "searchReq", description = "검색 요청") diff --git 
a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java index d50239ed..87534f62 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/service/ModelMngService.java @@ -59,7 +59,6 @@ public class ModelMngService { return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "삭제되었습니다."); } - public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) { UUID uuid = UUID.randomUUID(); addReq.setUuid(uuid); @@ -67,12 +66,10 @@ public class ModelMngService { return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "등록되었습니다."); } - public UploadDto.UploadRes uploadChunkModelFile(UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) - { + public UploadDto.UploadRes uploadChunkModelFile( + UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) { UploadDto.UploadRes upRes = uploadService.uploadChunk(upAddReqDto, chunkFile); return upRes; } - - } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java index 87fa15d1..d803a756 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/ModelMngCoreService.java @@ -1,10 +1,7 @@ package com.kamco.cd.kamcoback.postgres.core; -import com.kamco.cd.kamcoback.config.api.ApiResponseDto; import com.kamco.cd.kamcoback.model.dto.ModelMngDto; -import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity; import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository; -import jakarta.persistence.EntityNotFoundException; import java.time.LocalDate; import java.util.UUID; import lombok.RequiredArgsConstructor; @@ -35,15 +32,15 @@ public class ModelMngCoreService { .orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다.")); */ // id 코드 deleted = true 업데이트 - //entity.deleted(); + // entity.deleted(); modelMngRepository.deleteByModelUuid(uuid); } - public void insertModel(ModelMngDto.AddReq addReq){ + public void insertModel(ModelMngDto.AddReq addReq) { - //ModelMngEntity addEntity = new ModelMngEntity(); - //addEntity.setModelType(addReq.getModelType()); + // ModelMngEntity addEntity = new ModelMngEntity(); + // addEntity.setModelType(addReq.getModelType()); modelMngRepository.insertModel(addReq); } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java index f963464c..85c0612d 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/UploadSessionCoreService.java @@ -12,8 +12,7 @@ public class UploadSessionCoreService { private final UploadSessionRepository uploadSessionRepository; - public void createUploadSession(UploadDto.UploadAddReq addReq) - { + public void createUploadSession(UploadDto.UploadAddReq addReq) { /* UUID newUuid = UUID.randomUUID(); @@ -39,18 +38,17 @@ public class UploadSessionCoreService { */ uploadSessionRepository.insertUploadSession(addReq); - } - public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi){ + public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) { return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi); } - public 
UploadDto.uploadDto findByUuid(UUID uuid){ + public UploadDto.uploadDto findByUuid(UUID uuid) { return uploadSessionRepository.findByUuid(uuid); } - public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq){ + public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) { uploadSessionRepository.updateUploadSessionStatus(addReq); } } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java index 58646a83..18a93105 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/entity/UploadSessionEntity.java @@ -86,5 +86,4 @@ public class UploadSessionEntity { @ColumnDefault("uuid_generate_v4()") @Column(name = "uuid", nullable = false) private UUID uuid; - } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java index e82b4593..b84ea5d3 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryCustom.java @@ -20,7 +20,6 @@ public interface ModelMngRepositoryCustom { Optional findByModelUuid(UUID uuid); - void insertModel(ModelMngDto.AddReq addReq); void deleteByModelUuid(UUID uuid); diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java index 92cd36ef..db15f3e4 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/model/ModelMngRepositoryImpl.java @@ -123,10 +123,7 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport System.out.println("uuid == " + uuid); return Optional.ofNullable( - queryFactory - .selectFrom(modelMngEntity) - .where(modelMngEntity.uuid.eq(uuid)) - .fetchOne()); + queryFactory.selectFrom(modelMngEntity).where(modelMngEntity.uuid.eq(uuid)).fetchOne()); } private BooleanExpression eventEndedAtBetween( @@ -172,33 +169,32 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport @Override public void insertModel(@Valid ModelMngDto.AddReq addReq) { long execCount = - queryFactory - .insert(modelMngEntity) - .columns( - modelMngEntity.modelVer, - modelMngEntity.modelType, - modelMngEntity.filePath, - modelMngEntity.fileName, - modelMngEntity.memo, - modelMngEntity.uuid) - .values( - addReq.getModelVer(), - addReq.getModelType(), - addReq.getFilePath(), - addReq.getFileName(), - addReq.getMemo(), - addReq.getUuid()) - .execute(); + queryFactory + .insert(modelMngEntity) + .columns( + modelMngEntity.modelVer, + modelMngEntity.modelType, + modelMngEntity.filePath, + modelMngEntity.fileName, + modelMngEntity.memo, + modelMngEntity.uuid) + .values( + addReq.getModelVer(), + addReq.getModelType(), + addReq.getFilePath(), + addReq.getFileName(), + addReq.getMemo(), + addReq.getUuid()) + .execute(); } @Override - public void deleteByModelUuid(UUID uuid){ + public void deleteByModelUuid(UUID uuid) { long execCount = - queryFactory - .update(modelMngEntity) - .set(modelMngEntity.deleted, true) - .where(modelMngEntity.uuid.eq(uuid)) - .execute(); + queryFactory + .update(modelMngEntity) + .set(modelMngEntity.deleted, 
true) + .where(modelMngEntity.uuid.eq(uuid)) + .execute(); } - } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java index a6502e85..0fae2b64 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryCustom.java @@ -1,19 +1,15 @@ package com.kamco.cd.kamcoback.postgres.repository.upload; - - import com.kamco.cd.kamcoback.upload.dto.UploadDto; import java.util.UUID; public interface UploadSessionRepositoryCustom { - void insertUploadSession(UploadDto.UploadAddReq addReq); UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi); UploadDto.uploadDto findByUuid(UUID uuid); + void updateUploadSessionStatus(UploadDto.UploadAddReq addReq); - - } diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java index e6914ee2..0594ae94 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/upload/UploadSessionRepositoryImpl.java @@ -1,6 +1,5 @@ package com.kamco.cd.kamcoback.postgres.repository.upload; - import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity; import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity; @@ -30,113 +29,104 @@ public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport @Override public void insertUploadSession(UploadDto.UploadAddReq addReq) { long execCnt = - queryFactory - .insert(uploadSessionEntity) - .columns( - uploadSessionEntity.uploadId, - uploadSessionEntity.datasetId, - uploadSessionEntity.fileName, - uploadSessionEntity.fileSize, - uploadSessionEntity.finalPath, - uploadSessionEntity.status, - uploadSessionEntity.tempPath, - uploadSessionEntity.chunkIndex, - uploadSessionEntity.chunkTotalIndex, - uploadSessionEntity.uploadDivi, - uploadSessionEntity.fileHash, - uploadSessionEntity.uuid - ) - .values( - addReq.getUploadId(), - addReq.getDatasetId(), - addReq.getFileName(), - addReq.getFileSize(), - addReq.getFinalPath(), - addReq.getStatus(), - addReq.getTempPath(), - addReq.getChunkIndex(), - addReq.getChunkTotalIndex(), - addReq.getUploadDivi(), - addReq.getFileHash(), - addReq.getUuid() - ) - .execute(); - - + queryFactory + .insert(uploadSessionEntity) + .columns( + uploadSessionEntity.uploadId, + uploadSessionEntity.datasetId, + uploadSessionEntity.fileName, + uploadSessionEntity.fileSize, + uploadSessionEntity.finalPath, + uploadSessionEntity.status, + uploadSessionEntity.tempPath, + uploadSessionEntity.chunkIndex, + uploadSessionEntity.chunkTotalIndex, + uploadSessionEntity.uploadDivi, + uploadSessionEntity.fileHash, + uploadSessionEntity.uuid) + .values( + addReq.getUploadId(), + addReq.getDatasetId(), + addReq.getFileName(), + addReq.getFileSize(), + addReq.getFinalPath(), + addReq.getStatus(), + addReq.getTempPath(), + addReq.getChunkIndex(), + addReq.getChunkTotalIndex(), + addReq.getUploadDivi(), + addReq.getFileHash(), + addReq.getUuid()) + .execute(); } - @Override public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) { UploadDto.uploadDto foundContent = - queryFactory - .select( - 
Projections.constructor( - UploadDto.uploadDto.class, - uploadSessionEntity.uploadId, - uploadSessionEntity.datasetId, - uploadSessionEntity.fileName, - uploadSessionEntity.fileSize, - uploadSessionEntity.finalPath, - uploadSessionEntity.uploadDivi, - uploadSessionEntity.status, - uploadSessionEntity.tempPath, - uploadSessionEntity.chunkIndex, - uploadSessionEntity.chunkTotalIndex, - uploadSessionEntity.fileHash, - uploadSessionEntity.uuid - )) - .from(uploadSessionEntity) - .where(uploadSessionEntity.datasetId.eq(datasetId) - .and(uploadSessionEntity.uploadDivi.eq(uploadDivi))) - .limit(1) - .fetchOne(); + queryFactory + .select( + Projections.constructor( + UploadDto.uploadDto.class, + uploadSessionEntity.uploadId, + uploadSessionEntity.datasetId, + uploadSessionEntity.fileName, + uploadSessionEntity.fileSize, + uploadSessionEntity.finalPath, + uploadSessionEntity.uploadDivi, + uploadSessionEntity.status, + uploadSessionEntity.tempPath, + uploadSessionEntity.chunkIndex, + uploadSessionEntity.chunkTotalIndex, + uploadSessionEntity.fileHash, + uploadSessionEntity.uuid)) + .from(uploadSessionEntity) + .where( + uploadSessionEntity + .datasetId + .eq(datasetId) + .and(uploadSessionEntity.uploadDivi.eq(uploadDivi))) + .limit(1) + .fetchOne(); return foundContent; - } - @Override public UploadDto.uploadDto findByUuid(UUID uuid) { UploadDto.uploadDto foundContent = - queryFactory - .select( - Projections.constructor( - UploadDto.uploadDto.class, - uploadSessionEntity.uploadId, - uploadSessionEntity.datasetId, - uploadSessionEntity.fileName, - uploadSessionEntity.fileSize, - uploadSessionEntity.finalPath, - uploadSessionEntity.uploadDivi, - uploadSessionEntity.status, - uploadSessionEntity.tempPath, - uploadSessionEntity.chunkIndex, - uploadSessionEntity.chunkTotalIndex, - uploadSessionEntity.fileHash, - uploadSessionEntity.uuid - )) - .from(uploadSessionEntity) - .where(uploadSessionEntity.uuid.eq(uuid)) - .limit(1) - .fetchOne(); + queryFactory + .select( + Projections.constructor( + UploadDto.uploadDto.class, + uploadSessionEntity.uploadId, + uploadSessionEntity.datasetId, + uploadSessionEntity.fileName, + uploadSessionEntity.fileSize, + uploadSessionEntity.finalPath, + uploadSessionEntity.uploadDivi, + uploadSessionEntity.status, + uploadSessionEntity.tempPath, + uploadSessionEntity.chunkIndex, + uploadSessionEntity.chunkTotalIndex, + uploadSessionEntity.fileHash, + uploadSessionEntity.uuid)) + .from(uploadSessionEntity) + .where(uploadSessionEntity.uuid.eq(uuid)) + .limit(1) + .fetchOne(); return foundContent; - } - - public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq){ + public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) { long fileCount = - queryFactory - .update(uploadSessionEntity) - .set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex()) - .set(uploadSessionEntity.status, addReq.getStatus()) - .where(uploadSessionEntity.uploadId.eq(addReq.getUploadId())) - .execute(); + queryFactory + .update(uploadSessionEntity) + .set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex()) + .set(uploadSessionEntity.status, addReq.getStatus()) + .where(uploadSessionEntity.uploadId.eq(addReq.getUploadId())) + .execute(); } - } diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java b/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java index ceae6eb9..97e4ce51 100644 --- a/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/upload/UploadApiController.java @@ 
-81,13 +81,13 @@ public class UploadApiController { }) @PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) public ApiResponseDto fileChunkUpload( - @RequestParam("uuid") UUID uuid, - @RequestParam("fileName") String fileName, - @RequestParam("fileSize") long fileSize, - // @RequestParam("fileHash") String fileHash, - @RequestParam("chunkIndex") Integer chunkIndex, - @RequestParam("chunkTotalIndex") Integer chunkTotalIndex, - @RequestPart("chunkFile") MultipartFile chunkFile) { + @RequestParam("uuid") UUID uuid, + @RequestParam("fileName") String fileName, + @RequestParam("fileSize") long fileSize, + // @RequestParam("fileHash") String fileHash, + @RequestParam("chunkIndex") Integer chunkIndex, + @RequestParam("chunkTotalIndex") Integer chunkTotalIndex, + @RequestPart("chunkFile") MultipartFile chunkFile) { String uploadDivi = "dataset"; @@ -102,7 +102,7 @@ public class UploadApiController { upAddReqDto.setFinalPath(datasetDir); upAddReqDto.setTempPath(datasetTmpDir); - System.out.println("uuid === "+ uuid); + System.out.println("uuid === " + uuid); return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile)); } @@ -116,8 +116,7 @@ public class UploadApiController { @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) }) @PutMapping("/chunk-upload-complete/{uuid}") - public ApiResponseDto completeUpload( - @PathVariable UUID uuid) { + public ApiResponseDto completeUpload(@PathVariable UUID uuid) { return ApiResponseDto.ok(uploadService.completeUpload(uuid)); } diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java b/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java index 7096c87c..5ca61ac4 100644 --- a/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/upload/dto/UploadDto.java @@ -74,8 +74,8 @@ public class UploadDto { private Integer chunkTotalIndex; @Schema( - description = "파일해쉬", - example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") private String fileHash; @Schema(description = "uuid", example = "303d4e24-1726-4272-bbc7-01ab85692b80") @@ -153,7 +153,6 @@ public class UploadDto { @Schema(description = "파일명", example = "data.zip") private String fileName; - @Schema(description = "파일 크기 (bytes)", example = "10737418240") private Long fileSize; @@ -175,15 +174,13 @@ public class UploadDto { @Schema(description = "총 청크 Index", example = "100") private Integer chunkTotalIndex; - @Schema( - description = "파일해쉬", - example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + description = "파일해쉬", + example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") private String fileHash; @Schema(description = "uuid") private UUID uuid; - } @Schema(name = "UploadRes", description = "업로드 수행 후 리턴") @@ -204,13 +201,10 @@ public class UploadDto { if (this.chunkTotalIndex == 0) { return 0.0; } - return (double) (this.chunkIndex+1) / (this.chunkTotalIndex+1) * 100.0; + return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0; } - } - - @Schema(name = "DmlReturn", description = "수행 후 리턴") @Getter @Setter diff --git a/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java b/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java index de6c3dae..eae19b6e 100644 --- 
a/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java +++ b/src/main/java/com/kamco/cd/kamcoback/upload/service/UploadService.java @@ -59,17 +59,17 @@ public class UploadService { UploadDto.UploadRes upRes = new UploadDto.UploadRes(); long datasetId = 0; - if( upAddReqDto.getDatasetId() != null )datasetId = upAddReqDto.getDatasetId(); - String uploadId = System.currentTimeMillis()+""; - //UUID uuid = UUID.randomUUID(); + if (upAddReqDto.getDatasetId() != null) datasetId = upAddReqDto.getDatasetId(); + String uploadId = System.currentTimeMillis() + ""; + // UUID uuid = UUID.randomUUID(); UUID uuid = upAddReqDto.getUuid(); - String tmpDataSetDir = upAddReqDto.getTempPath()+uuid; - String fianlDir = upAddReqDto.getFinalPath()+uuid; + String tmpDataSetDir = upAddReqDto.getTempPath() + uuid; + String fianlDir = upAddReqDto.getFinalPath() + uuid; String uploadDivi = upAddReqDto.getUploadDivi(); - //String fileName = file.getOriginalFilename(); + // String fileName = file.getOriginalFilename(); String fileName = upAddReqDto.getFileName(); Integer chunkIndex = upAddReqDto.getChunkIndex(); - Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex(); + Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex(); String status = FileUploadStatus.INIT.name(); upRes.setUuid(uuid); @@ -83,37 +83,33 @@ public class UploadService { upAddReqDto.setTempPath(tmpDataSetDir); upAddReqDto.setFinalPath(fianlDir); - //세션 신규,중복체크(초기화 포함) + // 세션 신규,중복체크(초기화 포함) UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes); - if( !upRes.getRes().equals("success") )return upRes; - + if (!upRes.getRes().equals("success")) return upRes; status = FileUploadStatus.UPLOADING.name(); upAddReqDto.setStatus(status); - if( dto != null ) - { - tmpDataSetDir = dto.getTempPath(); + if (dto != null) { + tmpDataSetDir = dto.getTempPath(); fianlDir = dto.getFinalPath(); } - //폴더 생성 및 체크 - if( ! checkChunkFoler(upRes, tmpDataSetDir, fianlDir) )return upRes; + // 폴더 생성 및 체크 + if (!checkChunkFoler(upRes, tmpDataSetDir, fianlDir)) return upRes; - //chunk저장하기 - if( ! 
FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex ) ) - { + // chunk저장하기 + if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) { upRes.setRes("fail"); - upRes.setResMsg("chunkIndex:"+chunkIndex+" 업로드 애러"); + upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 애러"); } - if( chunkIndex == chunkTotalIndex ) { + if (chunkIndex == chunkTotalIndex) { upAddReqDto.setUploadId(dto.getUploadId()); upAddReqDto.setStatus(FileUploadStatus.DONE.name()); uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto); - try { this.mergeChunks(tmpDataSetDir, fianlDir, fileName, chunkTotalIndex); @@ -122,14 +118,11 @@ public class UploadService { uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto); } catch (IOException e) { - //throw new RuntimeException(e); + // throw new RuntimeException(e); upRes.setRes("fail"); upRes.setResMsg("파일Chunk 병합(merge) 애러"); return upRes; } - - - } return upRes; @@ -149,7 +142,8 @@ public class UploadService { upRes.setChunkTotalIndex(dto.getChunkTotalIndex()); try { - this.mergeChunks(dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex()); + this.mergeChunks( + dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex()); } catch (IOException e) { upRes.setRes("fail"); @@ -159,21 +153,17 @@ public class UploadService { } return upRes; - } - public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) - { + public boolean checkChunkFoler(UploadDto.UploadRes upRes, String tmpDataSetDir, String fianlDir) { - if( ! FIleChecker.mkDir(tmpDataSetDir) ) - { + if (!FIleChecker.mkDir(tmpDataSetDir)) { upRes.setRes("fail"); upRes.setRes("CHUNK 폴더 생성 ERROR"); return false; } - if( ! FIleChecker.mkDir(fianlDir) ) - { + if (!FIleChecker.mkDir(fianlDir)) { upRes.setRes("fail"); upRes.setRes("업로드 완료 폴더 생성 ERROR"); return false; @@ -182,28 +172,26 @@ public class UploadService { return true; } - - public UploadDto.uploadDto checkUploadSession(UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) { + public UploadDto.uploadDto checkUploadSession( + UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) { upRes.setRes("success"); upRes.setResMsg("정상처리되었습니다."); UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid()); - if( upAddReqDto.getChunkIndex() == 0 ) { - if( dto != null ) - { + if (upAddReqDto.getChunkIndex() == 0) { + if (dto != null) { upRes.setRes("duplicate"); upRes.setResMsg("이미 진행중인 업로드세션입니다."); return dto; } upAddReqDto.setStatus("UPLOADING"); - upRes.setUuid( upAddReqDto.getUuid() ); + upRes.setUuid(upAddReqDto.getUuid()); uploadSessionCoreService.createUploadSession(upAddReqDto); - } - else { - if( dto == null ){ + } else { + if (dto == null) { upRes.setRes("nosession"); upRes.setResMsg("업로드 세션이 존재하지 않습니다."); return dto; @@ -214,22 +202,22 @@ public class UploadService { uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto); } - if( dto != null )upRes.setUuid( dto.getUuid() ); + if (dto != null) upRes.setUuid(dto.getUuid()); upRes.setChunkIndex(upAddReqDto.getChunkIndex()); upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex()); - return dto; - } - public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex) throws IOException { + public void mergeChunks(String tmpDir, String fianlDir, String fileName, int chunkTotalIndex) + throws IOException { Path outputPath = Paths.get(fianlDir, fileName); - try (FileChannel outChannel = FileChannel.open(outputPath, 
StandardOpenOption.CREATE, StandardOpenOption.WRITE)) { + try (FileChannel outChannel = + FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) { for (int i = 0; i <= chunkTotalIndex; i++) { - Path chunkPath = Paths.get(tmpDir, i+""); + Path chunkPath = Paths.get(tmpDir, i + ""); try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) { long transferred = 0; @@ -243,10 +231,7 @@ public class UploadService { } } - //병합후 임시 폴더 삭제 + // 병합후 임시 폴더 삭제 FIleChecker.deleteFolder(tmpDir); - } - - } From b7c239d6cf88d7c8454a8b2e03b7b0c3285d60a0 Mon Sep 17 00:00:00 2001 From: Moon Date: Thu, 8 Jan 2026 18:48:11 +0900 Subject: [PATCH 4/8] =?UTF-8?q?=EA=B2=BD=EB=A1=9C=EC=84=A4=EC=A0=95?= =?UTF-8?q?=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/main/resources/application.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index ef668085..ac714724 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -66,8 +66,8 @@ management: file: - sync-root-dir: D:/app/original-images/ - #sync-root-dir: /app/original-images/ + #sync-root-dir: D:/app/original-images/ + sync-root-dir: /app/original-images/ sync-tmp-dir: ${file.sync-root-dir}tmp/ sync-file-extention: tfw,tif @@ -76,5 +76,5 @@ file: dataset-tmp-dir: ${file.dataset-dir}tmp/ #model-dir: D:/app/model/ - dataset-dir: /app/model/ + model-dir: /app/model/ model-tmp-dir: ${file.model-dir}tmp/ From 13c8e7b09bfad3153f2f77575737824c0e8144ec Mon Sep 17 00:00:00 2001 From: Moon Date: Fri, 9 Jan 2026 09:37:03 +0900 Subject: [PATCH 5/8] =?UTF-8?q?=EB=AA=A8=EB=8D=B8=EC=82=AD=EC=A0=9C?= =?UTF-8?q?=EC=88=98=EC=A0=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../model/ModelMngApiController.java | 54 ++++++++++++------- 1 file changed, 34 insertions(+), 20 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java index fe78d3a4..7bb50358 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java @@ -81,31 +81,45 @@ public class ModelMngApiController { return ApiResponseDto.ok(result); } - @Operation(summary = "삭제", description = "모델을 삭제 합니다.") + @Operation(summary = "모델삭제", description = "모델을 삭제 합니다.") @ApiResponses( - value = { - @ApiResponse( - responseCode = "204", - description = "모델 삭제 성공", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = Long.class))), - @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), - @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse( + responseCode = "201", + description = "등록 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Long.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @DeleteMapping("/{uuid}") public ApiResponseDto removeModel( - 
@io.swagger.v3.oas.annotations.parameters.RequestBody( - description = "모델 삭제 요청 정보", - required = true) - @PathVariable - String uuid) { - return ApiResponseDto.okObject(modelMngService.removeModel(UUID.fromString(uuid))); + @io.swagger.v3.oas.annotations.parameters.RequestBody( + description = "모델 삭제 요청 정보", + required = true) + @PathVariable + String uuid) { + + return ApiResponseDto.ok(modelMngService.removeModel(UUID.fromString(uuid))); } - @Operation(summary = "모델등록") + @Operation(summary = "모델등록", description = "모델을 등록 합니다.") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "등록 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Long.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping public ApiResponseDto ModelMgmt( @RequestBody @Valid ModelMngDto.AddReq addReq) { From 9c395cb0eec717b24d0e12a96d3baaba3fde04e7 Mon Sep 17 00:00:00 2001 From: "gayoun.park" Date: Fri, 9 Jan 2026 09:40:41 +0900 Subject: [PATCH 6/8] spotless --- .../model/ModelMngApiController.java | 58 +++++++++---------- .../label/LabelWorkRepositoryImpl.java | 11 ++-- 2 files changed, 34 insertions(+), 35 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java index 7bb50358..28914ec1 100644 --- a/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/model/ModelMngApiController.java @@ -83,43 +83,43 @@ public class ModelMngApiController { @Operation(summary = "모델삭제", description = "모델을 삭제 합니다.") @ApiResponses( - value = { - @ApiResponse( - responseCode = "201", - description = "등록 성공", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = Long.class))), - @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), - @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse( + responseCode = "201", + description = "등록 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Long.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @DeleteMapping("/{uuid}") public ApiResponseDto removeModel( - @io.swagger.v3.oas.annotations.parameters.RequestBody( - description = "모델 삭제 요청 정보", - required = true) - @PathVariable - String uuid) { + @io.swagger.v3.oas.annotations.parameters.RequestBody( + description = "모델 삭제 요청 정보", + required = true) + @PathVariable + String uuid) { return ApiResponseDto.ok(modelMngService.removeModel(UUID.fromString(uuid))); } @Operation(summary = "모델등록", description = "모델을 등록 합니다.") @ApiResponses( - value = { - @ApiResponse( - responseCode = "201", - description = "등록 성공", - content = - @Content( - mediaType = "application/json", - schema = @Schema(implementation = Long.class))), - @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), - 
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), - @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) - }) + value = { + @ApiResponse( + responseCode = "201", + description = "등록 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Long.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) @PostMapping public ApiResponseDto ModelMgmt( @RequestBody @Valid ModelMngDto.AddReq addReq) { diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java index 10308aed..1ee68346 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java @@ -57,7 +57,6 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { QLabelingAssignmentEntity.labelingAssignmentEntity; private final QMemberEntity memberEntity = QMemberEntity.memberEntity; - /** * 변화탐지 년도 셀렉트박스 조회 * @@ -81,8 +80,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { } /** - * 라벨링 작업관리 목록 조회 - * (복잡한 집계 쿼리로 인해 DTO 직접 반환) + * 라벨링 작업관리 목록 조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환) * * @param searchReq 검색 조건 * @return 라벨링 작업관리 목록 페이지 @@ -345,7 +343,9 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { mapSheetAnalInferenceEntity .uuid .eq(uuid) - .and(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))) + .and( + labelingAssignmentEntity.analUid.eq( + mapSheetAnalInferenceEntity.id))) .innerJoin(memberEntity) .on(whereSubBuilder) .where(whereBuilder) @@ -390,8 +390,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { } /** - * 작업배정 상세조회 - * (복잡한 집계 쿼리로 인해 DTO 직접 반환) + * 작업배정 상세조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환) * * @param uuid 작업배정 UUID * @return 작업배정 상세 정보 From 3b02296760d885cc94ab9176816a5ffa3ec4f76d Mon Sep 17 00:00:00 2001 From: "gayoun.park" Date: Fri, 9 Jan 2026 09:49:05 +0900 Subject: [PATCH 7/8] =?UTF-8?q?=EB=9D=BC=EB=B2=A8=EB=A7=81=20=EC=A2=85?= =?UTF-8?q?=EB=A3=8C=20=EC=83=81=ED=83=9C=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java b/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java index db764638..a945e5f0 100644 --- a/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/label/dto/LabelAllocateDto.java @@ -27,7 +27,8 @@ public class LabelAllocateDto { LABEL_COMPLETE("라벨완료"), INSPECT_REQ("검수요청"), INSPECT_ING("검수진행중"), - INSPECT_COMPLETE("검수완료"); + INSPECT_COMPLETE("검수완료"), + FINISH("종료"); private String desc; From 3e6afde0d32ed2835f60a2acab6d1205677c408a Mon Sep 17 00:00:00 2001 From: teddy Date: Fri, 9 Jan 2026 09:50:17 +0900 Subject: [PATCH 8/8] =?UTF-8?q?[KC-99]=20=EC=B6=94=EB=A1=A0=EA=B4=80?= =?UTF-8?q?=EB=A6=AC=20=EB=93=B1=EB=A1=9D=20api=20=EC=B6=94=EA=B0=80?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../InferenceResultApiController.java | 27 ++++++ .../inference/dto/InferenceResultDto.java | 83 ++++++++++++++++--- .../service/InferenceResultService.java | 9 ++ .../core/InferenceResultCoreService.java | 19 +++++ .../label/LabelWorkRepositoryImpl.java | 11 ++- 5 files changed, 130 insertions(+), 19 deletions(-) diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java index a5a82719..3325bd64 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/InferenceResultApiController.java @@ -11,10 +11,13 @@ import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.validation.Valid; import java.time.LocalDate; import lombok.RequiredArgsConstructor; import org.springframework.data.domain.Page; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @@ -59,6 +62,30 @@ public class InferenceResultApiController { return ApiResponseDto.ok(analResList); } + @Operation(summary = "변화탐지 실행 정보 입력", description = "어드민 홈 > 추론관리 > 추론목록 > 변화탐지 실행 정보 입력") + @ApiResponses( + value = { + @ApiResponse( + responseCode = "201", + description = "변화탐지 실행 정보 생성 성공", + content = + @Content( + mediaType = "application/json", + schema = @Schema(implementation = Void.class))), + @ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content), + @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) + }) + @PostMapping("/reg") + public ApiResponseDto saveInferenceInfo( + @io.swagger.v3.oas.annotations.parameters.RequestBody( + description = "변화탐지 실행 정보 저장 요청 정보", + required = true) + @RequestBody + @Valid + InferenceResultDto.RegReq req) { + return ApiResponseDto.ok(null); + } + // @ApiResponses( // value = { // @ApiResponse( diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java index 133b38cf..26efc23d 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/dto/InferenceResultDto.java @@ -1,9 +1,14 @@ package com.kamco.cd.kamcoback.inference.dto; import com.kamco.cd.kamcoback.common.utils.enums.EnumType; +import com.kamco.cd.kamcoback.common.utils.interfaces.EnumValid; import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm; +import io.swagger.v3.oas.annotations.media.Schema; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; import java.time.LocalDate; import java.time.ZonedDateTime; +import java.util.List; import java.util.UUID; import lombok.AllArgsConstructor; import lombok.Getter; @@ -14,6 +19,7 @@ import org.springframework.data.domain.Pageable; public class InferenceResultDto { + /** 목록조회 dto */ @Getter @Setter @AllArgsConstructor @@ -32,6 +38,7 @@ public class InferenceResultDto { 
@JsonFormatDttm private ZonedDateTime applyDttm; } + /** 목록조회 검색 조건 dto */ @Getter @Setter @NoArgsConstructor @@ -53,6 +60,27 @@ public class InferenceResultDto { } } + /** 탐지 데이터 옵션 dto */ + @Getter + @AllArgsConstructor + public enum MapSheetScope implements EnumType { + EXCL("추론제외"), + PREV("이전 년도 도엽 사용"), + ; + private final String desc; + + @Override + public String getId() { + return name(); + } + + @Override + public String getText() { + return desc; + } + } + + /** 분석대상 도엽 enum */ @Getter @AllArgsConstructor public enum DetectOption implements EnumType { @@ -72,22 +100,51 @@ public class InferenceResultDto { } } + /** 변화탐지 실행 정보 저장 요청 정보 */ @Getter + @Setter + @NoArgsConstructor @AllArgsConstructor - public enum MapSheetScope implements EnumType { - EXCL("추론제외"), - PREV("이전 년도 도엽 사용"), - ; - private final String desc; + public static class RegReq { - @Override - public String getId() { - return name(); - } + @Schema(description = "제목", example = "2025-2026 track changes Pororo") + @NotBlank + private String title; - @Override - public String getText() { - return desc; - } + @Schema(description = "M1", example = "2") + @NotNull + private Long model1Uid; + + @Schema(description = "M2", example = "4") + @NotNull + private Long model2Uid; + + @Schema(description = "M3", example = "7") + @NotNull + private Long model3Uid; + + @Schema(description = "비교년도", example = "2003") + @NotNull + private Integer compareYyyy; + + @Schema(description = "탐지년도", example = "2004") + @NotNull + private Integer targetYyyy; + + @Schema(description = "탐지 데이터 옵션 - 추론제외(PREV), 이전 년도 도엽 사용(PREV)", example = "EXCL") + @NotBlank + @EnumValid( + enumClass = MapSheetScope.class, + message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.") + private String mapSheetScope; + + @Schema(description = "분석대상 도엽 - 전체(ALL), 부분(PART)", example = "PART") + @NotBlank + @EnumValid(enumClass = DetectOption.class, message = "분석대상 도엽 옵션은 '전체', '부분' 만 사용 가능합니다.") + private String detectOption; + + @Schema(description = "5k 도협 번호 목록", example = "[34607067,34607067]") + @NotNull + private List mapSheetNum; } } diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java index 285e1527..aa2a0daa 100644 --- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java +++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java @@ -31,6 +31,15 @@ public class InferenceResultService { return inferenceResultCoreService.getInferenceResultList(req); } + /** + * 변화탐지 실행 정보 생성 + * + * @param req + */ + public void saveInferenceInfo(InferenceResultDto.RegReq req) { + inferenceResultCoreService.saveInferenceInfo(req); + } + /** * 분석결과 요약정보 * diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java index 1b4443d4..34fe6e51 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java @@ -1,5 +1,6 @@ package com.kamco.cd.kamcoback.postgres.core; +import com.fasterxml.jackson.databind.ObjectMapper; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard; import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet; @@ -16,6 +17,7 @@ import 
jakarta.persistence.EntityNotFoundException; import jakarta.validation.constraints.NotNull; import java.util.List; import lombok.RequiredArgsConstructor; +import org.locationtech.jts.io.geojson.GeoJsonWriter; import org.springframework.data.domain.Page; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -28,6 +30,9 @@ public class InferenceResultCoreService { private final MapSheetLearnRepository mapSheetLearnRepository; private final MapInkx5kRepository mapInkx5kRepository; + private final ObjectMapper objectMapper = new ObjectMapper(); + private final GeoJsonWriter geoJsonWriter = new GeoJsonWriter(); + /** * 추론관리 목록 * @@ -39,6 +44,20 @@ public class InferenceResultCoreService { return list.map(MapSheetLearnEntity::toDto); } + /** + * 변화탐지 실행 정보 생성 + * + * @param req + */ + public void saveInferenceInfo(InferenceResultDto.RegReq req) { + MapSheetLearnEntity mapSheetLearnEntity = new MapSheetLearnEntity(); + mapSheetLearnEntity.setTitle(req.getTitle()); + mapSheetLearnEntity.setM1ModelUid(req.getModel1Uid()); + mapSheetLearnEntity.setM2ModelUid(req.getModel2Uid()); + mapSheetLearnEntity.setM3ModelUid(req.getModel3Uid()); + // mapSheetLearnRepository.save() + } + /****/ /** diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java index 10308aed..1ee68346 100644 --- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java +++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/label/LabelWorkRepositoryImpl.java @@ -57,7 +57,6 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { QLabelingAssignmentEntity.labelingAssignmentEntity; private final QMemberEntity memberEntity = QMemberEntity.memberEntity; - /** * 변화탐지 년도 셀렉트박스 조회 * @@ -81,8 +80,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { } /** - * 라벨링 작업관리 목록 조회 - * (복잡한 집계 쿼리로 인해 DTO 직접 반환) + * 라벨링 작업관리 목록 조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환) * * @param searchReq 검색 조건 * @return 라벨링 작업관리 목록 페이지 @@ -345,7 +343,9 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { mapSheetAnalInferenceEntity .uuid .eq(uuid) - .and(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id))) + .and( + labelingAssignmentEntity.analUid.eq( + mapSheetAnalInferenceEntity.id))) .innerJoin(memberEntity) .on(whereSubBuilder) .where(whereBuilder) @@ -390,8 +390,7 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom { } /** - * 작업배정 상세조회 - * (복잡한 집계 쿼리로 인해 DTO 직접 반환) + * 작업배정 상세조회 (복잡한 집계 쿼리로 인해 DTO 직접 반환) * * @param uuid 작업배정 UUID * @return 작업배정 상세 정보
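
For reference, a minimal client-side sketch of the chunk upload flow wired up in the patches above, assuming a plain Spring RestTemplate caller. The endpoint paths and form field names (uuid, fileName, fileSize, chunkIndex, chunkTotalIndex, chunkFile) are taken from UploadApiController and SecurityConfig; the base URL, chunk size, and the RestTemplate/ByteArrayResource plumbing are illustrative assumptions, not part of the committed code.

// Hypothetical client for the chunk upload API added above.
// Field names and endpoints mirror UploadApiController; BASE_URL and CHUNK_SIZE are assumptions.
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.UUID;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class ChunkUploadClientSketch {

    private static final String BASE_URL = "http://localhost:8080"; // assumption
    private static final int CHUNK_SIZE = 10 * 1024 * 1024;         // 10 MB per chunk, assumption

    public static void main(String[] args) throws IOException {
        RestTemplate rest = new RestTemplate();
        File file = new File(args[0]);

        UUID uuid = UUID.randomUUID(); // client-generated session key, reused for every chunk
        long fileSize = file.length();
        // chunkTotalIndex is the LAST zero-based index, matching the server-side merge loop
        int chunkTotalIndex = (int) Math.max(0, (fileSize - 1) / CHUNK_SIZE);

        try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
            for (int chunkIndex = 0; chunkIndex <= chunkTotalIndex; chunkIndex++) {
                long offset = (long) chunkIndex * CHUNK_SIZE;
                byte[] buf = new byte[(int) Math.min(CHUNK_SIZE, fileSize - offset)];
                raf.seek(offset);
                raf.readFully(buf);

                MultiValueMap<String, Object> form = new LinkedMultiValueMap<>();
                form.add("uuid", uuid.toString());
                form.add("fileName", file.getName());
                form.add("fileSize", String.valueOf(fileSize));
                form.add("chunkIndex", String.valueOf(chunkIndex));
                form.add("chunkTotalIndex", String.valueOf(chunkTotalIndex));
                // the part must expose a filename so Spring sends it as a file part
                form.add("chunkFile", new ByteArrayResource(buf) {
                    @Override
                    public String getFilename() {
                        return file.getName();
                    }
                });

                HttpHeaders headers = new HttpHeaders();
                headers.setContentType(MediaType.MULTIPART_FORM_DATA);

                // POST /api/upload/file-chunk-upload (permitted without auth in SecurityConfig)
                rest.postForObject(
                        BASE_URL + "/api/upload/file-chunk-upload",
                        new HttpEntity<>(form, headers),
                        String.class);
            }
        }

        // Optional: re-trigger the merge for a session whose last chunk did not complete it.
        rest.exchange(
                BASE_URL + "/api/upload/chunk-upload-complete/" + uuid,
                HttpMethod.PUT,
                HttpEntity.EMPTY,
                String.class);
    }
}

Note that chunkTotalIndex is the last zero-based chunk index rather than the chunk count: the server merges with "for (int i = 0; i <= chunkTotalIndex; i++)" and treats "chunkIndex == chunkTotalIndex" as the final chunk, so a client that sends the count instead would never trigger the merge.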