Merge pull request 'feat/infer_dev_260107' (#162) from feat/infer_dev_260107 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/162
2026-01-09 09:41:19 +09:00
22 changed files with 1373 additions and 84 deletions

View File

@@ -0,0 +1,28 @@
package com.kamco.cd.kamcoback.common.enums;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import lombok.AllArgsConstructor;
import lombok.Getter;
@CodeExpose
@Getter
@AllArgsConstructor
public enum FileUploadStatus implements EnumType {
INIT("초기화"),
UPLOADING("업로드중"),
DONE("업로드완료"),
MERGED("병합완료");
private final String desc;
@Override
public String getId() {
return name();
}
@Override
public String getText() {
return desc;
}
}
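For orientation, a minimal sketch (not part of the commit) of what the EnumType contract yields here: getId() is the constant name that the upload flow below persists as the session status, and getText() is the Korean label exposed via @CodeExpose:

    FileUploadStatus status = FileUploadStatus.MERGED;
    System.out.println(status.getId());   // "MERGED" - the value written to tb_upload_session.status
    System.out.println(status.getText()); // "병합완료"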

View File

@@ -29,6 +29,7 @@ import lombok.Getter;
import org.apache.commons.io.FilenameUtils;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.gce.geotiff.GeoTiffReader;
+import org.springframework.util.FileSystemUtils;
import org.springframework.web.multipart.MultipartFile;
public class FIleChecker {
@@ -489,6 +490,24 @@ public class FIleChecker {
return true;
}
public static boolean multipartChunkSaveTo(
MultipartFile mfile, String targetPath, int chunkIndex) {
// each chunk is written under targetPath using its index as the file name
File dest = new File(targetPath, String.valueOf(chunkIndex));
try {
mfile.transferTo(dest);
} catch (IOException e) {
return false;
}
return true;
}
public static boolean deleteFolder(String path) {
return FileSystemUtils.deleteRecursively(new File(path));
}
public static boolean validationMultipart(MultipartFile mfile) {
// validate the multipart file
if (mfile == null || mfile.isEmpty() || mfile.getSize() == 0) {

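To make the chunk layout concrete: multipartChunkSaveTo writes each part under the session's temp directory with the bare chunk index as the file name, which the merge step later reads back in order. A self-contained sketch (illustrative paths, not part of the commit):

    import java.nio.file.Files;
    import java.nio.file.Path;

    public class ChunkLayoutSketch {
        public static void main(String[] args) throws Exception {
            // simulate what multipartChunkSaveTo produces: one file per chunk, named "0", "1", "2", ...
            Path tmp = Files.createTempDirectory("chunks"); // stands in for <tempPath><uuid>/
            for (int i = 0; i <= 2; i++) {
                Files.write(tmp.resolve(String.valueOf(i)), ("chunk-" + i).getBytes());
            }
            System.out.println(Files.list(tmp).count()); // 3 chunk files, merged in index order later
        }
    }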
View File

@@ -44,6 +44,8 @@ public class SecurityConfig {
.authorizeHttpRequests(
auth ->
auth
+// .requestMatchers("/chunk_upload_test.html").authenticated()
// allow the entire mapsheet area (top priority)
.requestMatchers("/api/mapsheet/**")
.permitAll()
@@ -77,7 +79,11 @@ public class SecurityConfig {
"/api/auth/logout",
"/swagger-ui/**",
"/api/members/*/password",
-"/v3/api-docs/**")
+"/v3/api-docs/**",
+"/chunk_upload_test.html",
+"/api/model/file-chunk-upload",
+"/api/upload/file-chunk-upload",
+"/api/upload/chunk-upload-complete/**") // the complete endpoint takes a {uuid} path variable
.permitAll()
// IAM: logged-in users only
.requestMatchers("/api/user/**", "/api/my/menus", "/api/code/**")

View File

@@ -194,14 +194,14 @@ public class LabelWorkDto {
@Schema(description = "1일전처리개수") @Schema(description = "1일전처리개수")
private Long day1AgoDoneCnt; private Long day1AgoDoneCnt;
public Long getremindCnt() { public Long getRemainCnt() {
return this.assignedCnt - this.doneCnt; return this.assignedCnt - this.doneCnt;
} }
public double getDoneRate() { public double getDoneRate() {
Long dayDoneCnt = this.day3AgoDoneCnt + this.day2AgoDoneCnt + this.day1AgoDoneCnt; long dayDoneCnt = this.day3AgoDoneCnt + this.day2AgoDoneCnt + this.day1AgoDoneCnt;
if (dayDoneCnt == null || dayDoneCnt == 0) { if (dayDoneCnt == 0) {
return 0.0; return 0.0;
} }
return (double) dayDoneCnt / 3; return (double) dayDoneCnt / 3;

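getDoneRate() is just the three-day average of completed items; once the sum is an unboxed long, the old null check is unnecessary. A quick worked example with illustrative counts:

    long dayDoneCnt = 4 + 3 + 2;               // day3Ago + day2Ago + day1Ago
    double doneRate = (double) dayDoneCnt / 3; // 3.0 items per day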
View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.common.utils.zip.ZipUtils;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
@@ -11,16 +12,19 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.transaction.Transactional;
-import java.io.IOException;
+import jakarta.validation.Valid;
import java.time.LocalDate;
+import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
@@ -36,6 +40,27 @@ public class ModelMngApiController {
private final ModelMngService modelMngService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
@Autowired private ZipUtils zipUtils;
@Operation(summary = "모델관리 목록")
@@ -56,12 +81,12 @@ public class ModelMngApiController {
return ApiResponseDto.ok(result);
}
-@Operation(summary = "삭제", description = "모델을 삭제 합니다.")
+@Operation(summary = "모델삭제", description = "모델을 삭제 합니다.")
@ApiResponses(
value = {
@ApiResponse(
-responseCode = "204",
-description = "모델 삭제 성공",
+responseCode = "201",
+description = "등록 성공",
content =
@Content(
mediaType = "application/json",
@@ -70,19 +95,71 @@ public class ModelMngApiController {
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content), @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content) @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
}) })
@DeleteMapping("/{modelVer}") @DeleteMapping("/{uuid}")
public ApiResponseDto<ApiResponseDto.ResponseObj> removeModel( public ApiResponseDto<ApiResponseDto.ResponseObj> removeModel(
@io.swagger.v3.oas.annotations.parameters.RequestBody( @io.swagger.v3.oas.annotations.parameters.RequestBody(
description = "모델 삭제 요청 정보", description = "모델 삭제 요청 정보",
required = true) required = true)
@PathVariable @PathVariable
String modelVer) { String uuid) {
return ApiResponseDto.okObject(modelMngService.removeModel(modelVer));
return ApiResponseDto.ok(modelMngService.removeModel(UUID.fromString(uuid)));
} }
@Operation(summary = "모델 zip 파일 업로드", description = "모델 zip 파일 업로드") @Operation(summary = "모델등록", description = "모델을 등록 합니다.")
@PostMapping(value = "/upload/zip", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) @ApiResponses(
public void upload(@RequestPart MultipartFile zipFilie) throws IOException { value = {
zipUtils.processZip(zipFilie.getInputStream()); @ApiResponse(
responseCode = "201",
description = "등록 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = Long.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping
public ApiResponseDto<ApiResponseDto.ResponseObj> ModelMgmt(
@RequestBody @Valid ModelMngDto.AddReq addReq) {
return ApiResponseDto.ok(modelMngService.insertModel(addReq));
}
@Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
@ApiResponses(
value = {
@ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<UploadDto.UploadRes> fileChunkUpload(
@RequestParam("uuid") UUID uuid,
@RequestParam("fileName") String fileName,
@RequestParam("fileSize") long fileSize,
// @RequestParam("fileHash") String fileHash,
@RequestParam("chunkIndex") Integer chunkIndex,
@RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
@RequestPart("chunkFile") MultipartFile chunkFile) {
String uploadDivi = "model";
UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
upAddReqDto.setDatasetId(0L);
upAddReqDto.setUuid(uuid);
upAddReqDto.setFileName(fileName);
upAddReqDto.setFileSize(fileSize);
upAddReqDto.setChunkIndex(chunkIndex);
upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
upAddReqDto.setUploadDivi(uploadDivi);
upAddReqDto.setFinalPath(modelDir);
upAddReqDto.setTempPath(modelTmpDir);
System.out.println("uuid === " + uuid);
return ApiResponseDto.ok(modelMngService.uploadChunkModelFile(upAddReqDto, chunkFile));
}
}

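For manual testing of the new endpoint, a client-side sketch of posting a single chunk to /api/model/file-chunk-upload; this assumes spring-webflux's WebClient is available, and the host, file name, and chunk size are illustrative:

    import org.springframework.core.io.ByteArrayResource;
    import org.springframework.http.MediaType;
    import org.springframework.http.client.MultipartBodyBuilder;
    import org.springframework.web.reactive.function.BodyInserters;
    import org.springframework.web.reactive.function.client.WebClient;
    import java.util.UUID;

    public class ChunkUploadClientSketch {
        public static void main(String[] args) {
            WebClient client = WebClient.create("http://localhost:8080");
            UUID uuid = UUID.randomUUID();             // session id, reused for every chunk
            byte[] chunk = new byte[10 * 1024 * 1024]; // one 10MB chunk

            MultipartBodyBuilder body = new MultipartBodyBuilder();
            body.part("uuid", uuid.toString());
            body.part("fileName", "model.zip");
            body.part("fileSize", String.valueOf(chunk.length));
            body.part("chunkIndex", "0");
            body.part("chunkTotalIndex", "0");         // single-chunk upload
            body.part("chunkFile", new ByteArrayResource(chunk) {
                @Override public String getFilename() { return "model.zip"; }
            });

            String res = client.post()
                    .uri("/api/model/file-chunk-upload")
                    .contentType(MediaType.MULTIPART_FORM_DATA)
                    .body(BodyInserters.fromMultipartData(body.build()))
                    .retrieve()
                    .bodyToMono(String.class)
                    .block();
            System.out.println(res);
        }
    }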
View File

@@ -1,11 +1,13 @@
package com.kamco.cd.kamcoback.model.dto;
+import com.fasterxml.jackson.annotation.JsonIgnore;
import com.kamco.cd.kamcoback.common.utils.enums.CodeExpose;
import com.kamco.cd.kamcoback.common.utils.enums.EnumType;
import com.kamco.cd.kamcoback.common.utils.interfaces.JsonFormatDttm;
import io.swagger.v3.oas.annotations.media.Schema;
import java.math.BigDecimal;
import java.time.ZonedDateTime;
+import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
@@ -96,7 +98,6 @@ public class ModelMngDto {
@AllArgsConstructor
public static class ModelList {
-private Integer rowNum;
private String modelVer;
private String fileName;
private String modelType;
@@ -108,6 +109,7 @@ public class ModelMngDto {
private BigDecimal iou;
private String memo;
private Boolean deleted;
+private UUID uuid;
}
@Schema(name = "ModelAddReq", description = "모델 등록 req")
@@ -118,10 +120,12 @@ public class ModelMngDto {
public static class AddReq {
private String modelType;
-private String dockerFileNm;
private String modelVer;
-private String hyperVer;
-private String epochVer;
+private String filePath;
+private String fileName;
+private String memo;
+@JsonIgnore private UUID uuid;
}
@Schema(name = "searchReq", description = "검색 요청")

View File

@@ -3,10 +3,15 @@ package com.kamco.cd.kamcoback.model.service;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
+import com.kamco.cd.kamcoback.upload.dto.UploadDto;
+import com.kamco.cd.kamcoback.upload.service.UploadService;
import java.time.LocalDate;
+import java.util.UUID;
import lombok.RequiredArgsConstructor;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
@Service
@RequiredArgsConstructor
@@ -14,6 +19,29 @@ public class ModelMngService {
private final ModelMngCoreService modelMngCoreService;
+private final UploadService uploadService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
public Page<ModelMngDto.ModelList> findModelMgmtList(
ModelMngDto.searchReq searchReq,
LocalDate startDate,
@@ -24,7 +52,24 @@ public class ModelMngService {
searchReq, startDate, endDate, modelType, searchVal);
}
-public ApiResponseDto.ResponseObj removeModel(String modelVer) {
-return modelMngCoreService.removeModel(modelVer);
+public ApiResponseDto.ResponseObj removeModel(UUID uuid) {
+modelMngCoreService.removeModel(uuid);
+return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "삭제되었습니다.");
+}
+public ApiResponseDto.ResponseObj insertModel(ModelMngDto.AddReq addReq) {
+// uuid is generated server-side; @JsonIgnore keeps it out of the request body
+UUID uuid = UUID.randomUUID();
+addReq.setUuid(uuid);
+modelMngCoreService.insertModel(addReq);
+return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "등록되었습니다.");
+}
+public UploadDto.UploadRes uploadChunkModelFile(
+UploadDto.UploadAddReq upAddReqDto, MultipartFile chunkFile) {
+return uploadService.uploadChunk(upAddReqDto, chunkFile);
+}
}

View File

@@ -1,11 +1,9 @@
package com.kamco.cd.kamcoback.postgres.core;
-import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
-import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import com.kamco.cd.kamcoback.postgres.repository.model.ModelMngRepository;
-import jakarta.persistence.EntityNotFoundException;
import java.time.LocalDate;
+import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
@@ -26,14 +24,24 @@ public class ModelMngCoreService {
searchReq, startDate, endDate, modelType, searchVal);
}
-public ApiResponseDto.ResponseObj removeModel(String modelVer) {
+public void removeModel(UUID uuid) {
+/*
ModelMngEntity entity =
modelMngRepository
-.findByModelUid(modelVer)
+.findByModelUuid(uuid)
-.orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다. ver: " + modelVer));
+.orElseThrow(() -> new EntityNotFoundException("model을 찾을 수 없습니다."));
+*/
// flag the row as deleted = true
-entity.deleted();
-return new ApiResponseDto.ResponseObj(ApiResponseDto.ApiResponseCode.OK, "");
+// entity.deleted();
+modelMngRepository.deleteByModelUuid(uuid);
}
+public void insertModel(ModelMngDto.AddReq addReq) {
+modelMngRepository.insertModel(addReq);
+}
}

View File

@@ -0,0 +1,54 @@
package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.postgres.repository.upload.UploadSessionRepository;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
public class UploadSessionCoreService {
private final UploadSessionRepository uploadSessionRepository;
public void createUploadSession(UploadDto.UploadAddReq addReq) {
/*
UUID newUuid = UUID.randomUUID();
UploadSessionEntity entity = new UploadSessionEntity();
entity.setUploadId(addReq.getUploadId());
entity.setDatasetId(addReq.getDatasetId());
entity.setFileName(addReq.getFileName());
entity.setFileSize(addReq.getFileSize());
entity.setFinalPath(addReq.getFinalPath());
entity.setStatus(addReq.getStatus());
entity.setTempPath(addReq.getTempPath());
entity.setChunkIndex(addReq.getChunkIndex());
entity.setChunkTotalIndex(addReq.getChunkTotalIndex());
entity.setUploadDivi(addReq.getUploadDivi());
entity.setFileHash(addReq.getFileHash());
entity.setUuid(newUuid);
//System.out.println("======================");
UploadSessionEntity saved = uploadSessionRepository.save(entity);
return String.valueOf(saved.getUuid());
*/
uploadSessionRepository.insertUploadSession(addReq);
}
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
return uploadSessionRepository.findByDatasetUid(datasetId, uploadDivi);
}
public UploadDto.uploadDto findByUuid(UUID uuid) {
return uploadSessionRepository.findByUuid(uuid);
}
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
uploadSessionRepository.updateUploadSessionStatus(addReq);
}
}

View File

@@ -8,6 +8,7 @@ import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import java.time.ZonedDateTime;
+import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
@@ -52,6 +53,9 @@ public class ModelMngEntity extends CommonDateEntity {
@Column(name = "memo")
private String memo;
+@Column(name = "uuid")
+private UUID uuid;
public void deleted() {
this.deleted = true;
}

View File

@@ -0,0 +1,89 @@
package com.kamco.cd.kamcoback.postgres.entity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import java.time.OffsetDateTime;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;
import org.hibernate.annotations.ColumnDefault;
@Getter
@Setter
@Entity
@Table(name = "tb_upload_session")
public class UploadSessionEntity {
@Id
@Size(max = 100)
@Column(name = "upload_id", nullable = false, length = 100)
private String uploadId;
@Column(name = "completed_dttm")
private OffsetDateTime completedDttm;
@NotNull
@ColumnDefault("now()")
@Column(name = "created_dttm", nullable = false)
private OffsetDateTime createdDttm;
@Column(name = "dataset_id")
private Long datasetId;
@Column(name = "error_message", length = Integer.MAX_VALUE)
private String errorMessage;
@Size(max = 255)
@Column(name = "file_name")
private String fileName;
@ColumnDefault("0")
@Column(name = "file_size")
private Long fileSize;
@Size(max = 500)
@Column(name = "final_path", length = 500)
private String finalPath;
@Size(max = 20)
@Column(name = "status", length = 20)
private String status;
@Size(max = 500)
@Column(name = "temp_path", length = 500)
private String tempPath;
@Column(name = "chunk_total_index")
private Integer chunkTotalIndex;
@NotNull
@ColumnDefault("now()")
@Column(name = "updated_dttm", nullable = false)
private OffsetDateTime updatedDttm;
@Column(name = "chunk_index")
private Integer chunkIndex;
@Size(max = 50)
@Column(name = "upload_divi", length = 50)
private String uploadDivi;
@Size(max = 300)
@Column(name = "file_hash", length = 300)
private String fileHash;
@Column(name = "total_chunks")
private Integer totalChunks;
@Column(name = "uploaded_chunks")
private Integer uploadedChunks;
@NotNull
@ColumnDefault("uuid_generate_v4()")
@Column(name = "uuid", nullable = false)
private UUID uuid;
}

View File

@@ -1,17 +1,16 @@
package com.kamco.cd.kamcoback.postgres.repository.label;
-import static com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity.labelingAssignmentEntity;
-import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
-import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
-import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
-import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
import com.kamco.cd.kamcoback.label.dto.LabelAllocateDto.LabelState;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMng;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.LabelWorkMngDetail;
import com.kamco.cd.kamcoback.label.dto.LabelWorkDto.WorkerState;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalInferenceEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QLabelingAssignmentEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity;
+import com.kamco.cd.kamcoback.postgres.entity.QMemberEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Expression;
import com.querydsl.core.types.OrderSpecifier;
@@ -21,7 +20,6 @@ import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.DateTimePath;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
-import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.time.LocalDate;
@@ -30,6 +28,7 @@ import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
+import java.util.Optional;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@@ -46,12 +45,22 @@ import org.springframework.stereotype.Repository;
public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
private final JPAQueryFactory queryFactory;
-private final StringExpression NULL_STRING = Expressions.stringTemplate("cast(null as text)");
+// Q-class field declarations
+private final QMapSheetAnalInferenceEntity mapSheetAnalInferenceEntity =
+QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
+private final QMapSheetAnalDataInferenceEntity mapSheetAnalDataInferenceEntity =
+QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
+private final QMapSheetAnalDataInferenceGeomEntity mapSheetAnalDataInferenceGeomEntity =
+QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
+private final QLabelingAssignmentEntity labelingAssignmentEntity =
+QLabelingAssignmentEntity.labelingAssignmentEntity;
+private final QMemberEntity memberEntity = QMemberEntity.memberEntity;
/**
* change-detection year select-box lookup
*
-* @return
+* @return list of change-detection years (entities)
*/
@Override
public List<MapSheetAnalInferenceEntity> findChangeDetectYearList() {
@@ -71,10 +80,10 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
}
/**
-* labeling work-management list lookup
+* labeling work-management list lookup (returns DTOs directly due to the complex aggregation)
*
-* @param searchReq
+* @param searchReq search conditions
-* @return
+* @return page of labeling work-management rows
*/
@Override
public Page<LabelWorkMng> labelWorkMngList(LabelWorkDto.LabelWorkMngSearchReq searchReq) {
@@ -214,7 +223,9 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
.limit(pageable.getPageSize())
.fetch();
-Long total =
+// run the count query separately (null-safe handling)
+long total =
+Optional.ofNullable(
queryFactory
.select(mapSheetAnalInferenceEntity.uuid.countDistinct())
.from(mapSheetAnalInferenceEntity)
@@ -223,7 +234,8 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
.on(whereSubBuilder)
.where(whereBuilder)
-.fetchOne();
+.fetchOne())
+.orElse(0L);
return new PageImpl<>(foundContent, pageable, total);
}
@@ -320,7 +332,9 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
.limit(pageable.getPageSize())
.fetch();
-Long countQuery =
+// run the count query separately (null-safe handling)
+long countQuery =
+Optional.ofNullable(
queryFactory
.select(labelingAssignmentEntity.workerUid.countDistinct())
.from(labelingAssignmentEntity)
@@ -329,12 +343,14 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
mapSheetAnalInferenceEntity
.uuid
.eq(uuid)
-.and(labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id)))
+.and(
+labelingAssignmentEntity.analUid.eq(
+mapSheetAnalInferenceEntity.id)))
.innerJoin(memberEntity)
.on(whereSubBuilder)
.where(whereBuilder)
-// .groupBy(memberEntity.userRole, memberEntity.name, memberEntity.userId)
-.fetchOne();
+.fetchOne())
+.orElse(0L);
return new PageImpl<>(foundContent, pageable, countQuery);
}
@@ -374,10 +390,10 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
}
/**
-* work-assignment detail lookup
+* work-assignment detail lookup (returns a DTO directly due to the complex aggregation)
*
-* @param uuid
+* @param uuid work-assignment UUID
-* @return
+* @return work-assignment detail info
*/
@Override
public LabelWorkMngDetail findLabelWorkMngDetail(UUID uuid) {
@@ -419,19 +435,13 @@ public class LabelWorkRepositoryImpl implements LabelWorkRepositoryCustom {
}
public NumberExpression<Long> caseSumExpression(BooleanExpression condition) {
-NumberExpression<Long> sumExp = new CaseBuilder().when(condition).then(1L).otherwise(0L).sum();
-return sumExp;
+return new CaseBuilder().when(condition).then(1L).otherwise(0L).sum();
}
public BooleanExpression fromDateEqExpression(DateTimePath<ZonedDateTime> path, int addDayCnt) {
LocalDate threeDaysAgo = LocalDate.now().plusDays(addDayCnt);
-String toDate = threeDaysAgo.format(DateTimeFormatter.ofPattern("YYYY-MM-DD"));
+String toDate = threeDaysAgo.format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
-BooleanExpression condition =
-Expressions.stringTemplate("to_char({0}, 'YYYY-MM-DD')", path).eq(toDate);
-return condition;
+return Expressions.stringTemplate("to_char({0}, 'yyyy-MM-dd')", path).eq(toDate);
}
}

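The quiet pattern fix from "YYYY-MM-DD" to "yyyy-MM-dd" matters: in java.time patterns, uppercase Y is the week-based year (and uppercase D is day-of-year, which even refuses to format at width 2 late in the year), so dates around New Year silently shift by a year. A quick check:

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;

    public class WeekYearPitfall {
        public static void main(String[] args) {
            LocalDate d = LocalDate.of(2025, 12, 29); // belongs to week 1 of 2026
            System.out.println(d.format(DateTimeFormatter.ofPattern("YYYY-MM-dd"))); // 2026-12-29
            System.out.println(d.format(DateTimeFormatter.ofPattern("yyyy-MM-dd"))); // 2025-12-29
        }
    }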
View File

@@ -4,6 +4,7 @@ import com.kamco.cd.kamcoback.model.dto.ModelMngDto;
import com.kamco.cd.kamcoback.postgres.entity.ModelMngEntity;
import java.time.LocalDate;
import java.util.Optional;
+import java.util.UUID;
import org.springframework.data.domain.Page;
public interface ModelMngRepositoryCustom {
@@ -16,4 +17,10 @@ public interface ModelMngRepositoryCustom {
String searchVal);
Optional<ModelMngEntity> findByModelUid(String modelVer);
+Optional<ModelMngEntity> findByModelUuid(UUID uuid);
+void insertModel(ModelMngDto.AddReq addReq);
+void deleteByModelUuid(UUID uuid);
}

View File

@@ -14,6 +14,7 @@ import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberPath;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.impl.JPAQueryFactory;
+import jakarta.validation.Valid;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
@@ -22,6 +23,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
+import java.util.UUID;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
@@ -67,8 +69,6 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.select(
Projections.constructor(
ModelMngDto.ModelList.class,
-Expressions.numberTemplate(
-Integer.class, "row_number() over(order by {0} desc)", sortColumn),
modelMngEntity.modelVer,
modelMngEntity.fileName,
modelMngEntity.modelType,
@@ -80,7 +80,8 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
roundNumericToPercent(modelResultMetricEntity.loss),
roundNumericToPercent(modelResultMetricEntity.iou),
modelMngEntity.memo,
-modelMngEntity.deleted))
+modelMngEntity.deleted,
+modelMngEntity.uuid))
.from(modelMngEntity)
.innerJoin(modelResultMetricEntity)
.on(modelMngEntity.modelUid.eq(modelResultMetricEntity.modelUid))
@@ -116,6 +117,15 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
.fetchOne());
}
@Override
public Optional<ModelMngEntity> findByModelUuid(UUID uuid) {
return Optional.ofNullable(
queryFactory.selectFrom(modelMngEntity).where(modelMngEntity.uuid.eq(uuid)).fetchOne());
}
private BooleanExpression eventEndedAtBetween(
LocalDate startDate, LocalDate endDate, String sortColumn) {
if (Objects.isNull(startDate) || Objects.isNull(endDate)) {
@@ -155,4 +165,36 @@ public class ModelMngRepositoryImpl extends QuerydslRepositorySupport
private Expression<BigDecimal> roundNumericToPercent(NumberPath<Double> ratio) {
return Expressions.numberTemplate(BigDecimal.class, "function('round', {0} * 100, 2)", ratio);
}
@Override
public void insertModel(@Valid ModelMngDto.AddReq addReq) {
queryFactory
.insert(modelMngEntity)
.columns(
modelMngEntity.modelVer,
modelMngEntity.modelType,
modelMngEntity.filePath,
modelMngEntity.fileName,
modelMngEntity.memo,
modelMngEntity.uuid)
.values(
addReq.getModelVer(),
addReq.getModelType(),
addReq.getFilePath(),
addReq.getFileName(),
addReq.getMemo(),
addReq.getUuid())
.execute();
}
@Override
public void deleteByModelUuid(UUID uuid) {
// soft delete: flag the row as deleted instead of removing it
queryFactory
.update(modelMngEntity)
.set(modelMngEntity.deleted, true)
.where(modelMngEntity.uuid.eq(uuid))
.execute();
}
}

View File

@@ -0,0 +1,7 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import org.springframework.data.jpa.repository.JpaRepository;
public interface UploadSessionRepository
extends JpaRepository<UploadSessionEntity, String>, UploadSessionRepositoryCustom {}

View File

@@ -0,0 +1,15 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import java.util.UUID;
public interface UploadSessionRepositoryCustom {
void insertUploadSession(UploadDto.UploadAddReq addReq);
UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi);
UploadDto.uploadDto findByUuid(UUID uuid);
void updateUploadSessionStatus(UploadDto.UploadAddReq addReq);
}

View File

@@ -0,0 +1,132 @@
package com.kamco.cd.kamcoback.postgres.repository.upload;
import static com.kamco.cd.kamcoback.postgres.entity.QUploadSessionEntity.uploadSessionEntity;
import com.kamco.cd.kamcoback.postgres.entity.UploadSessionEntity;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.querydsl.core.types.Projections;
import com.querydsl.jpa.impl.JPAQueryFactory;
import java.util.UUID;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
public class UploadSessionRepositoryImpl extends QuerydslRepositorySupport
implements UploadSessionRepositoryCustom {
private final JPAQueryFactory queryFactory;
public UploadSessionRepositoryImpl(JPAQueryFactory queryFactory) {
super(UploadSessionEntity.class);
this.queryFactory = queryFactory;
}
@Override
public void insertUploadSession(UploadDto.UploadAddReq addReq) {
queryFactory
.insert(uploadSessionEntity)
.columns(
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid)
.values(
addReq.getUploadId(),
addReq.getDatasetId(),
addReq.getFileName(),
addReq.getFileSize(),
addReq.getFinalPath(),
addReq.getStatus(),
addReq.getTempPath(),
addReq.getChunkIndex(),
addReq.getChunkTotalIndex(),
addReq.getUploadDivi(),
addReq.getFileHash(),
addReq.getUuid())
.execute();
}
@Override
public UploadDto.uploadDto findByDatasetUid(Long datasetId, String uploadDivi) {
UploadDto.uploadDto foundContent =
queryFactory
.select(
Projections.constructor(
UploadDto.uploadDto.class,
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(
uploadSessionEntity
.datasetId
.eq(datasetId)
.and(uploadSessionEntity.uploadDivi.eq(uploadDivi)))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public UploadDto.uploadDto findByUuid(UUID uuid) {
UploadDto.uploadDto foundContent =
queryFactory
.select(
Projections.constructor(
UploadDto.uploadDto.class,
uploadSessionEntity.uploadId,
uploadSessionEntity.datasetId,
uploadSessionEntity.fileName,
uploadSessionEntity.fileSize,
uploadSessionEntity.finalPath,
uploadSessionEntity.uploadDivi,
uploadSessionEntity.status,
uploadSessionEntity.tempPath,
uploadSessionEntity.chunkIndex,
uploadSessionEntity.chunkTotalIndex,
uploadSessionEntity.fileHash,
uploadSessionEntity.uuid))
.from(uploadSessionEntity)
.where(uploadSessionEntity.uuid.eq(uuid))
.limit(1)
.fetchOne();
return foundContent;
}
@Override
public void updateUploadSessionStatus(UploadDto.UploadAddReq addReq) {
queryFactory
.update(uploadSessionEntity)
.set(uploadSessionEntity.chunkIndex, addReq.getChunkIndex())
.set(uploadSessionEntity.status, addReq.getStatus())
.where(uploadSessionEntity.uploadId.eq(addReq.getUploadId()))
.execute();
}
}

View File

@@ -0,0 +1,143 @@
package com.kamco.cd.kamcoback.upload;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.service.UploadService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
@Tag(name = "파일 업로드", description = "대용량 파일 업로드 API")
@RestController
@RequestMapping("/api/upload")
@RequiredArgsConstructor
public class UploadApiController {
private final UploadService uploadService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
/*
@Operation(summary = "데이터셋 대용량 업로드 세션 시작", description = "데이터셋 대용량 파일 업로드 세션을 시작합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "세션 생성 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = UploadDto.InitRes.class))),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/chunk-upload-init")
public ApiResponseDto<DmlReturn> initUpload(
@RequestBody @Valid UploadDto.InitReq initReq) {
return ApiResponseDto.createOK(uploadService.initUpload(initReq));
}
*/
@Operation(summary = "데이터셋 대용량 파일 분할 전송", description = "데이터셋 파일 대용량 파일을 청크 단위로 전송합니다.")
@ApiResponses(
value = {
@ApiResponse(responseCode = "200", description = "청크 업로드 성공", content = @Content),
@ApiResponse(responseCode = "400", description = "잘못된 요청 데이터", content = @Content),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping(value = "/file-chunk-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public ApiResponseDto<UploadDto.UploadRes> fileChunkUpload(
@RequestParam("uuid") UUID uuid,
@RequestParam("fileName") String fileName,
@RequestParam("fileSize") long fileSize,
// @RequestParam("fileHash") String fileHash,
@RequestParam("chunkIndex") Integer chunkIndex,
@RequestParam("chunkTotalIndex") Integer chunkTotalIndex,
@RequestPart("chunkFile") MultipartFile chunkFile) {
String uploadDivi = "dataset";
UploadDto.UploadAddReq upAddReqDto = new UploadDto.UploadAddReq();
upAddReqDto.setDatasetId(0L);
upAddReqDto.setUuid(uuid);
upAddReqDto.setFileName(fileName);
upAddReqDto.setFileSize(fileSize);
upAddReqDto.setChunkIndex(chunkIndex);
upAddReqDto.setChunkTotalIndex(chunkTotalIndex);
upAddReqDto.setUploadDivi(uploadDivi);
upAddReqDto.setFinalPath(datasetDir);
upAddReqDto.setTempPath(datasetTmpDir);
System.out.println("uuid === " + uuid);
return ApiResponseDto.ok(uploadService.uploadChunk(upAddReqDto, chunkFile));
}
@Operation(summary = "업로드 완료된 파일 병합", description = "업로드 완료 및 파일 병합을 요청합니다.")
@ApiResponses(
value = {
@ApiResponse(responseCode = "200", description = "업로드 완료 성공", content = @Content),
@ApiResponse(responseCode = "400", description = "잘못된 요청", content = @Content),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PutMapping("/chunk-upload-complete/{uuid}")
public ApiResponseDto<UploadDto.UploadRes> completeUpload(@PathVariable UUID uuid) {
return ApiResponseDto.ok(uploadService.completeUpload(uuid));
}
/*
@Operation(summary = "업로드 상태 조회", description = "업로드 진행 상태를 조회합니다.")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = UploadDto.Status.class))),
@ApiResponse(responseCode = "404", description = "업로드 세션을 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/status")
public ApiResponseDto<UploadDto.Status> getUploadStatus(
@RequestBody @Valid UploadDto.StatusReq statusReq) {
return ApiResponseDto.ok(uploadService.getUploadStatus(statusReq));
}
*/
}

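A matching sketch for the completion call; plain java.net.http suffices since the PUT carries no body (host and uuid illustrative):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class CompleteUploadSketch {
        public static void main(String[] args) throws Exception {
            String uuid = "303d4e24-1726-4272-bbc7-01ab85692b80"; // uuid used during the chunk uploads
            HttpRequest req = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/api/upload/chunk-upload-complete/" + uuid))
                    .PUT(HttpRequest.BodyPublishers.noBody())
                    .build();
            HttpResponse<String> res =
                    HttpClient.newHttpClient().send(req, HttpResponse.BodyHandlers.ofString());
            System.out.println(res.statusCode() + " " + res.body());
        }
    }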
View File

@@ -0,0 +1,218 @@
package com.kamco.cd.kamcoback.upload.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
public class UploadDto {
@Schema(name = "InitReq", description = "업로드(Chunk) 세션 초기화")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class InitReq {
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "총 청크 수", example = "100")
private Integer chunkTotalIndex;
/*
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
private String fileHash;
*/
@Schema(description = "업로드구분", example = "model")
private String uploadDivi;
}
@Schema(name = "UploadAddReq", description = "업로드 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class UploadAddReq {
@Schema(description = "업로드 ID", example = "각데이터의 식별키")
private String uploadId;
@Schema(description = "데이터식별키", example = "129227333")
private Long datasetId;
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "파일명", example = "data.zip")
private String finalPath;
@Schema(description = "업로드구분", example = "dataset")
private String uploadDivi;
@Schema(description = "상태", example = "UPLOADING")
private String status;
@Schema(description = "임시저장경로")
private String tempPath;
@Schema(description = "업로드 청크 Index", example = "50")
private Integer chunkIndex;
@Schema(description = "총 청크 수", example = "100")
private Integer chunkTotalIndex;
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
private String fileHash;
@Schema(description = "uuid", example = "303d4e24-1726-4272-bbc7-01ab85692b80")
private UUID uuid;
}
@Schema(name = "UploadCompleteReq", description = "업로드 완료 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class CompleteReq {
@NotBlank(message = "업로드 ID는 필수입니다")
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
private String uploadId;
}
@Schema(name = "UploadStatusReq", description = "업로드 상태 조회 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class StatusReq {
@NotBlank(message = "업로드 ID는 필수입니다")
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
private String uploadId;
}
@Schema(name = "UploadStatus", description = "업로드 상태 정보")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class Status {
@Schema(description = "업로드 ID", example = "upload_20241218_123456_abc123")
private String uploadId;
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "상태", example = "UPLOADING")
private String status;
@Schema(description = "총 청크 수", example = "100")
private Integer totalChunks;
@Schema(description = "업로드된 청크 수", example = "50")
private Integer uploadedChunks;
@Schema(description = "진행률 (%)", example = "50.0")
private Double progress;
@Schema(description = "에러 메시지", example = "")
private String errorMessage;
}
@Schema(name = "UploadAddReq", description = "업로드 요청")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class uploadDto {
@Schema(description = "업로드 ID", example = "각데이터의 식별키")
private String uploadId;
@Schema(description = "데이터식별키", example = "129227333")
private Long datasetId;
@Schema(description = "파일명", example = "data.zip")
private String fileName;
@Schema(description = "파일 크기 (bytes)", example = "10737418240")
private Long fileSize;
@Schema(description = "파일명", example = "data.zip")
private String finalPath;
@Schema(description = "업로드구분", example = "dataset")
private String uploadDivi;
@Schema(description = "상태", example = "UPLOADING")
private String status;
@Schema(description = "임시저장경로")
private String tempPath;
@Schema(description = "업로드 청크 Index", example = "50")
private Integer chunkIndex;
@Schema(description = "총 청크 Index", example = "100")
private Integer chunkTotalIndex;
@Schema(
description = "파일해쉬",
example = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
private String fileHash;
@Schema(description = "uuid")
private UUID uuid;
}
@Schema(name = "UploadRes", description = "업로드 수행 후 리턴")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class UploadRes {
private String res;
private String resMsg;
private UUID uuid;
private String filePath;
private String fileName;
private int chunkIndex;
private int chunkTotalIndex;
public double getUploadRate() {
// indexes are 0-based: chunkTotalIndex 99 means 100 chunks in total
if (this.chunkTotalIndex < 0) {
return 0.0;
}
return (double) (this.chunkIndex + 1) / (this.chunkTotalIndex + 1) * 100.0;
}
}
@Schema(name = "DmlReturn", description = "수행 후 리턴")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class DmlReturn {
private String flag;
private String message;
}
}

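Because chunk indexes are 0-based, a file cut into 100 chunks has chunkTotalIndex = 99, and after chunk 49 the rate is (49 + 1) / (99 + 1) * 100 = 50%. A quick check:

    UploadDto.UploadRes res = new UploadDto.UploadRes();
    res.setChunkIndex(49);
    res.setChunkTotalIndex(99);
    System.out.println(res.getUploadRate()); // 50.0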
View File

@@ -0,0 +1,237 @@
package com.kamco.cd.kamcoback.upload.service;
import com.kamco.cd.kamcoback.common.enums.FileUploadStatus;
import com.kamco.cd.kamcoback.common.utils.FIleChecker;
import com.kamco.cd.kamcoback.postgres.core.UploadSessionCoreService;
import com.kamco.cd.kamcoback.upload.dto.UploadDto;
import com.kamco.cd.kamcoback.upload.dto.UploadDto.DmlReturn;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
@Slf4j
@Service
@RequiredArgsConstructor
public class UploadService {
private final UploadSessionCoreService uploadSessionCoreService;
@Value("${file.sync-root-dir}")
private String syncRootDir;
@Value("${file.sync-tmp-dir}")
private String syncTmpDir;
@Value("${file.sync-file-extention}")
private String syncFileExtention;
@Value("${file.dataset-dir}")
private String datasetDir;
@Value("${file.dataset-tmp-dir}")
private String datasetTmpDir;
@Value("${file.model-dir}")
private String modelDir;
@Value("${file.model-tmp-dir}")
private String modelTmpDir;
@Transactional
public DmlReturn initUpload(UploadDto.InitReq initReq) {
return new DmlReturn("success", "UPLOAD CHUNK INIT");
}
@Transactional
public UploadDto.UploadRes uploadChunk(UploadDto.UploadAddReq upAddReqDto, MultipartFile file) {
UploadDto.UploadRes upRes = new UploadDto.UploadRes();
String uploadId = System.currentTimeMillis() + "";
UUID uuid = upAddReqDto.getUuid();
String tmpDataSetDir = upAddReqDto.getTempPath() + uuid;
String finalDir = upAddReqDto.getFinalPath() + uuid;
String fileName = upAddReqDto.getFileName();
Integer chunkIndex = upAddReqDto.getChunkIndex();
Integer chunkTotalIndex = upAddReqDto.getChunkTotalIndex();
String status = FileUploadStatus.INIT.name();
upRes.setUuid(uuid);
upRes.setFilePath(finalDir);
upRes.setFileName(fileName);
upAddReqDto.setUuid(uuid);
upAddReqDto.setUploadId(uploadId);
upAddReqDto.setStatus(status);
upAddReqDto.setFileName(fileName);
upAddReqDto.setTempPath(tmpDataSetDir);
upAddReqDto.setFinalPath(finalDir);
// check for a new or duplicate session (includes initialization)
UploadDto.uploadDto dto = this.checkUploadSession(upAddReqDto, upRes);
if (!upRes.getRes().equals("success")) return upRes;
status = FileUploadStatus.UPLOADING.name();
upAddReqDto.setStatus(status);
if (dto != null) {
tmpDataSetDir = dto.getTempPath();
finalDir = dto.getFinalPath();
}
// create and verify the chunk folders
if (!checkChunkFolder(upRes, tmpDataSetDir, finalDir)) return upRes;
// persist this chunk
if (!FIleChecker.multipartChunkSaveTo(file, tmpDataSetDir, chunkIndex)) {
upRes.setRes("fail");
upRes.setResMsg("chunkIndex:" + chunkIndex + " 업로드 에러");
}
// compare by value: chunkIndex and chunkTotalIndex are boxed Integers
if (chunkIndex.equals(chunkTotalIndex)) {
if (dto != null) upAddReqDto.setUploadId(dto.getUploadId());
upAddReqDto.setStatus(FileUploadStatus.DONE.name());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
try {
this.mergeChunks(tmpDataSetDir, finalDir, fileName, chunkTotalIndex);
upAddReqDto.setStatus(FileUploadStatus.MERGED.name());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
} catch (IOException e) {
upRes.setRes("fail");
upRes.setResMsg("파일Chunk 병합(merge) 에러");
return upRes;
}
}
return upRes;
}
@Transactional
public UploadDto.UploadRes completeUpload(UUID uuid) {
UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(uuid);
UploadDto.UploadRes upRes = new UploadDto.UploadRes();
// guard against an unknown session before dereferencing the lookup result
if (dto == null) {
upRes.setRes("nosession");
upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
return upRes;
}
upRes.setRes("success");
upRes.setResMsg("병합(merge) 정상처리되었습니다.");
upRes.setUuid(uuid);
upRes.setFilePath(dto.getFinalPath());
upRes.setFileName(dto.getFileName());
upRes.setChunkIndex(dto.getChunkIndex());
upRes.setChunkTotalIndex(dto.getChunkTotalIndex());
try {
this.mergeChunks(
dto.getTempPath(), dto.getFinalPath(), dto.getFileName(), dto.getChunkTotalIndex());
} catch (IOException e) {
upRes.setRes("fail");
upRes.setResMsg("병합(merge) 애러");
return upRes;
}
return upRes;
}
public boolean checkChunkFolder(UploadDto.UploadRes upRes, String tmpDataSetDir, String finalDir) {
if (!FIleChecker.mkDir(tmpDataSetDir)) {
upRes.setRes("fail");
upRes.setResMsg("CHUNK 폴더 생성 ERROR");
return false;
}
if (!FIleChecker.mkDir(finalDir)) {
upRes.setRes("fail");
upRes.setResMsg("업로드 완료 폴더 생성 ERROR");
return false;
}
return true;
}
public UploadDto.uploadDto checkUploadSession(
UploadDto.UploadAddReq upAddReqDto, UploadDto.UploadRes upRes) {
upRes.setRes("success");
upRes.setResMsg("정상처리되었습니다.");
UploadDto.uploadDto dto = uploadSessionCoreService.findByUuid(upAddReqDto.getUuid());
if (upAddReqDto.getChunkIndex() == 0) {
if (dto != null) {
upRes.setRes("duplicate");
upRes.setResMsg("이미 진행중인 업로드세션입니다.");
return dto;
}
upAddReqDto.setStatus("UPLOADING");
upRes.setUuid(upAddReqDto.getUuid());
uploadSessionCoreService.createUploadSession(upAddReqDto);
} else {
if (dto == null) {
upRes.setRes("nosession");
upRes.setResMsg("업로드 세션이 존재하지 않습니다.");
return dto;
}
upAddReqDto.setStatus("UPLOADING");
upAddReqDto.setUploadId(dto.getUploadId());
uploadSessionCoreService.updateUploadSessionStatus(upAddReqDto);
}
if (dto != null) upRes.setUuid(dto.getUuid());
upRes.setChunkIndex(upAddReqDto.getChunkIndex());
upRes.setChunkTotalIndex(upAddReqDto.getChunkTotalIndex());
return dto;
}
public void mergeChunks(String tmpDir, String finalDir, String fileName, int chunkTotalIndex)
throws IOException {
Path outputPath = Paths.get(finalDir, fileName);
try (FileChannel outChannel =
FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
for (int i = 0; i <= chunkTotalIndex; i++) {
Path chunkPath = Paths.get(tmpDir, i + "");
try (FileChannel inChannel = FileChannel.open(chunkPath, StandardOpenOption.READ)) {
long transferred = 0;
long size = inChannel.size();
while (transferred < size) {
transferred += inChannel.transferTo(transferred, size - transferred, outChannel);
}
}
// delete each chunk right after merging to free disk space
Files.delete(chunkPath);
}
}
// remove the temp folder once the merge is done
FIleChecker.deleteFolder(tmpDir);
}
}

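A self-contained sketch (not part of the commit; the constructor dependency is stubbed with null since mergeChunks never touches it) showing the merge behavior: three numbered chunks are concatenated in index order, each chunk file is deleted as it is consumed, and the temp folder is removed at the end:

    import java.nio.file.Files;
    import java.nio.file.Path;

    public class MergeChunksSketch {
        public static void main(String[] args) throws Exception {
            Path tmp = Files.createTempDirectory("chunks");
            Path out = Files.createTempDirectory("final");
            for (int i = 0; i <= 2; i++) {
                Files.write(tmp.resolve(String.valueOf(i)), ("part" + i).getBytes());
            }
            new UploadService(null).mergeChunks(tmp.toString(), out.toString(), "merged.txt", 2);
            System.out.println(Files.readString(out.resolve("merged.txt"))); // part0part1part2
            System.out.println(Files.exists(tmp)); // false: temp folder deleted after merge
        }
    }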
View File

@@ -67,7 +67,14 @@ management:
file:
#sync-root-dir: D:/app/original-images/
-sync-root-dir: /app/original-images/
+# sync-root-dir: /app/original-images/
+sync-root-dir: /kamco-nfs/images/
sync-tmp-dir: ${file.sync-root-dir}/tmp
sync-file-extention: tfw,tif
#dataset-dir: D:/app/dataset/
dataset-dir: /app/dataset/
dataset-tmp-dir: ${file.dataset-dir}tmp/
#model-dir: D:/app/model/
model-dir: /app/model/
model-tmp-dir: ${file.model-dir}tmp/

View File

@@ -0,0 +1,137 @@
<!DOCTYPE html>
<html lang="ko">
<head>
<meta charset="UTF-8">
<title>Chunk Upload Test</title>
</head>
<body>
<h2>대용량 파일 청크 업로드 테스트</h2>
* Chunk 테스트 사이즈 10M (10 * 1024 * 1024) - 성능에 따라 변경가능<br><br>
* 업로드 API선택<br><br>
<select name="apiUrl" id="apiUrl" style="width:600px;height:40px;">
<option value="/api/model/file-chunk-upload">모델파일Chunk업로드 ( /api/model/file-chunk-upload )</option>
<option value="/api/upload/file-chunk-upload">파일Chunk업로드(공통) ( /api/upload/file-chunk-upload )</option>
</select>
<br><br>
* 파일첨부<br><br>
<input type="file" id="chunkFile" style="height:40px;"><br><br>
<button onclick="startUpload()" style="height:40px;">업로드 시작</button>
<br><br><br><br>
* 업로드시 업로드 이력을 추적하기 위해 UUID생성해서 전달(파일병합시 사용)(script 예제참고)<br><br>
UUID : <input id="uuid" name="uuid" value="" style="width:300px;height:30px;" readonly><br><br>
* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)<br><br>
chunkIndex : <input style="height:30px;" id="chunkIndex" placeholder="chunkIndex" readonly><br><br>
chunkTotalIndex : <input style="height:30px;" id="chunkTotalIndex" placeholder="chunkTotalIndex" readonly ><br><br>
* API 호출시 파일정보 추출해서 자동 할당해야 함.(script 예제참고)<br><br>
fileSize : <input style="height:30px;" id="fileSize" placeholder="fileSize" readonly><br><br>
<!--
fileHash : <input id="fileHash" placeholder="fileHash"><br><br> -->
<br><br>
* 진행율(%)<br><br>
<div style="width:500px;height:30px;border:1px solid #cccccc;"><div id="prgssbar" style="width:100%;height:30px;background:#eeeeee;"></div></div>
<br><br>
* 결과메세지<br><br>
<div id="status" style="padding:20px;width:800px;height:300px;border:1px solid #000000;"></div>
<script>
async function startUpload() {
const apiUrl = document.getElementById('apiUrl').value;
const file = document.getElementById('chunkFile').files[0];
if (!file) return alert("파일을 선택하세요.");
const fileName = file.name;
const CHUNK_SIZE = 10 * 1024 * 1024; // 10MB per chunk
const fileSize = file.size;
const totalChunks = Math.ceil(fileSize / CHUNK_SIZE);
const chunkTotalIndex = totalChunks - 1; // chunk indexes are 0-based
const uuid = crypto.randomUUID(); // unique id for this upload session
document.getElementById('uuid').value = uuid;
document.getElementById('fileSize').value = file.size;
document.getElementById('chunkTotalIndex').value = chunkTotalIndex;
for (let i = 0; i < totalChunks; i++) {
const start = i * CHUNK_SIZE;
const end = Math.min(start + CHUNK_SIZE, file.size);
const chunk = file.slice(start, end);
document.getElementById('chunkIndex').value = i;
const formData = new FormData();
formData.append("uuid", uuid);
formData.append("fileSize", fileSize);
formData.append("fileName", fileName);
formData.append("chunkIndex", i);
formData.append("chunkTotalIndex", chunkTotalIndex);
formData.append("chunkFile", chunk);
try {
const response = await fetch(apiUrl, { method: 'POST', body: formData });
// check the response status (200 OK etc.)
if (!response.ok) {
throw new Error(`서버 에러: ${response.status}`);
}
// read the JSON body the server returned
const result = await response.json();
document.getElementById('status').innerText = JSON.stringify(result, null, 2);
if (result.data.res != "success") {
// handle by showing an error alert here
break;
}
document.getElementById('prgssbar').style.width = result.data.uploadRate + "%";
} catch (error) {
console.error(`${i}번째 청크 업로드 실패:`, error);
break; // 오류 발생 시 중단
}
}
// after the last chunk, a final completion request could be sent:
//var mergeResult = await completeUpload(uuid);
//document.getElementById('status').innerText = JSON.stringify(mergeResult, null, 2);
}
async function completeUpload(uuid) {
try {
const response = await fetch(`/api/upload/chunk-upload-complete/${uuid}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json'
},
});
if (!response.ok) {
throw new Error(`서버 응답 에러: ${response.status}`);
}
const result = await response.json();
return result;
} catch (error) {
console.error("완료 요청 중 오류 발생:", error);
}
}
</script>
</body>
</html>