납품 데이터 등록 api 추가 #169

Merged
teddy merged 1 commit from feat/training_260324 into develop 2026-03-25 12:30:51 +09:00
4 changed files with 215 additions and 2 deletions

View File

@@ -1,7 +1,9 @@
package com.kamco.cd.training.dataset; package com.kamco.cd.training.dataset;
import com.kamco.cd.training.config.api.ApiResponseDto; import com.kamco.cd.training.config.api.ApiResponseDto;
import com.kamco.cd.training.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.training.dataset.dto.DatasetDto; import com.kamco.cd.training.dataset.dto.DatasetDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.AddDeliveriesReq;
import com.kamco.cd.training.dataset.dto.DatasetObjDto; import com.kamco.cd.training.dataset.dto.DatasetObjDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetClass; import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetClass;
import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetStorage; import com.kamco.cd.training.dataset.dto.DatasetObjDto.DatasetStorage;
@@ -269,4 +271,22 @@ public class DatasetApiController {
return ApiResponseDto.createOK(datasetService.getFolderAll(srchDto)); return ApiResponseDto.createOK(datasetService.getFolderAll(srchDto));
} }
/**
 * Registers a delivered (납품) training dataset from a server-side folder path.
 *
 * @param req request body carrying the root folder path of the delivered dataset
 * @return standard API envelope with an OK/metadata response object
 * @throws IOException if the service fails while reading the delivered files
 */
@Operation(summary = "납품 학습데이터셋 등록", description = "납품 학습데이터셋 등록 API")
@ApiResponses(
    value = {
        @ApiResponse(
            responseCode = "200",
            description = "등록 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    // FIX: this endpoint returns ApiResponseDto<ResponseObj>, not a Page;
                    // Page.class looked copy-pasted from a list endpoint and misled Swagger.
                    schema = @Schema(implementation = ResponseObj.class))),
        @ApiResponse(responseCode = "404", description = "조회 오류", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
@PostMapping("/deliveries")
public ApiResponseDto<ResponseObj> insertDeliveriesDataset(@RequestBody AddDeliveriesReq req) throws IOException {
    // Delegate to the service layer; createOK wraps the result in the standard envelope.
    return ApiResponseDto.createOK(datasetService.insertDeliveriesDataset(req));
}
} }

View File

@@ -532,4 +532,13 @@ public class DatasetDto {
private Long totalObjectCount; private Long totalObjectCount;
private String datasetPath; private String datasetPath;
} }
/** Request payload for registering a delivered (납품) training dataset. */
public static class AddDeliveriesReq {

    // Root folder path of the delivered dataset on the server.
    @Schema(description = "경로", example = "/")
    private String filePath;

    /** No-args constructor, required for JSON deserialization. */
    public AddDeliveriesReq() {}

    /** All-args constructor, mirroring Lombok's {@code @AllArgsConstructor}. */
    public AddDeliveriesReq(String filePath) {
        this.filePath = filePath;
    }

    public String getFilePath() {
        return filePath;
    }

    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }
}
} }

View File

@@ -11,6 +11,7 @@ import com.kamco.cd.training.common.utils.FIleChecker;
import com.kamco.cd.training.config.api.ApiResponseDto.ApiResponseCode; import com.kamco.cd.training.config.api.ApiResponseDto.ApiResponseCode;
import com.kamco.cd.training.config.api.ApiResponseDto.ResponseObj; import com.kamco.cd.training.config.api.ApiResponseDto.ResponseObj;
import com.kamco.cd.training.dataset.dto.DatasetDto; import com.kamco.cd.training.dataset.dto.DatasetDto;
import com.kamco.cd.training.dataset.dto.DatasetDto.AddDeliveriesReq;
import com.kamco.cd.training.dataset.dto.DatasetDto.AddReq; import com.kamco.cd.training.dataset.dto.DatasetDto.AddReq;
import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetMngRegDto; import com.kamco.cd.training.dataset.dto.DatasetDto.DatasetMngRegDto;
import com.kamco.cd.training.dataset.dto.DatasetObjDto; import com.kamco.cd.training.dataset.dto.DatasetObjDto;
@@ -21,6 +22,8 @@ import com.kamco.cd.training.dataset.dto.DatasetObjDto.SearchReq;
import com.kamco.cd.training.model.dto.FileDto.FoldersDto; import com.kamco.cd.training.model.dto.FileDto.FoldersDto;
import com.kamco.cd.training.model.dto.FileDto.SrchFoldersDto; import com.kamco.cd.training.model.dto.FileDto.SrchFoldersDto;
import com.kamco.cd.training.postgres.core.DatasetCoreService; import com.kamco.cd.training.postgres.core.DatasetCoreService;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@@ -56,10 +59,12 @@ import org.springframework.transaction.annotation.Transactional;
@Slf4j @Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@Transactional
public class DatasetService { public class DatasetService {
private final DatasetCoreService datasetCoreService; private final DatasetCoreService datasetCoreService;
@PersistenceContext
private EntityManager em;
private final ObjectMapper mapper = new ObjectMapper();
@Value("${file.dataset-dir}") @Value("${file.dataset-dir}")
private String datasetDir; private String datasetDir;
@@ -139,6 +144,7 @@ public class DatasetService {
return datasetCoreService.searchDatasetObjectList(searchReq); return datasetCoreService.searchDatasetObjectList(searchReq);
} }
@Transactional
public UUID deleteDatasetObjByUuid(UUID uuid) { public UUID deleteDatasetObjByUuid(UUID uuid) {
return datasetCoreService.deleteDatasetObjByUuid(uuid); return datasetCoreService.deleteDatasetObjByUuid(uuid);
} }
@@ -606,4 +612,178 @@ public class DatasetService {
return new FoldersDto(canonicalPath, folderTotCnt, folderErrTotCnt, folders); return new FoldersDto(canonicalPath, folderTotCnt, folderErrTotCnt, folders);
} }
/**
 * Registers a delivered (납품) training dataset rooted at the given folder path.
 *
 * Flow: validate the directory layout first, then persist the dataset master row,
 * ingest the train/val/test object files, and finally mark the upload complete.
 *
 * @param req 폴더경로, 메모 (folder path of the delivered dataset)
 * @return 성공/실패 여부 (OK response when the whole pipeline succeeds)
 */
public ResponseObj insertDeliveriesDataset(AddDeliveriesReq req) {
    long startTime = System.currentTimeMillis();
    log.info("========== 납품 데이터셋 업로드 시작 ==========");
    log.info("filePath: {}", req.getFilePath());

    // FIX: validate the directory structure BEFORE inserting the master row.
    // The original inserted first, so a validation failure forced a rollback of the
    // insert (and would leave an orphan row if ever run outside a transaction).
    validateTrainValTestDirs(req.getFilePath());
    validateDirFileCount(req.getFilePath());

    DatasetMngRegDto datasetMngRegDto = new DatasetMngRegDto();
    // 32-char uppercase UID, following the existing dataset UID convention.
    String uid = UUID.randomUUID()
        .toString()
        .replace("-", "")
        .toUpperCase();
    datasetMngRegDto.setUid(uid);
    datasetMngRegDto.setDataType("DELIVER");
    datasetMngRegDto.setCompareYyyy(0); // delivered sets carry years per object, not per master
    datasetMngRegDto.setTargetYyyy(0);
    datasetMngRegDto.setDatasetPath(req.getFilePath());

    // Master row save (inside the class-level transaction).
    Long datasetUid = datasetCoreService.insertDatasetMngData(datasetMngRegDto);
    log.info("납품 Dataset 마스터 저장 완료. datasetUid: {}", datasetUid);

    // Ingest each split in the fixed order train -> val -> test.
    for (String type : List.of("train", "val", "test")) {
        processType(req.getFilePath(), datasetUid, type);
    }
    datasetCoreService.updateDatasetUploadStatus(datasetUid);

    log.info("========== 전체 완료. 총 소요시간: {} ms ==========",
        System.currentTimeMillis() - startTime);
    return new ResponseObj(ApiResponseCode.OK, "업로드 성공하였습니다.");
}
/**
 * Ingests every unzipped file of one split (train/val/test) into the database,
 * logging progress every 1000 files and a final summary with elapsed time.
 */
private void processType(String path, Long datasetUid, String type) {
    long begin = System.currentTimeMillis();
    String tag = type.toUpperCase(); // computed once; used only for log prefixes
    log.info("[{}] 데이터 처리 시작", tag);

    List<Map<String, Object>> files = getUnzipDatasetFiles(path, type);
    log.info("[{}] 파일 개수: {}", tag, files.size());

    int done = 0;
    for (Map<String, Object> file : files) {
        insertTrainTestData(file, datasetUid, type);
        done++;
        if (done % 1000 == 0 || done == files.size()) {
            log.info("[{}] 진행건수: {}", tag, done);
        }
    }
    log.info("[{}] 완료. 총 {}건, 소요시간: {} ms",
        tag, done, System.currentTimeMillis() - begin);
}
/**
 * Persists every GeoJSON feature of one delivered file as a dataset object row,
 * routed to the train/val/test table by {@code subDir}.
 *
 * NOTE(review): this method is invoked via self-invocation from processType(), so the
 * Spring proxy does NOT intercept this @Transactional — the effective transaction is the
 * class-level one. Annotation kept for documentation; confirm before relying on it.
 *
 * @param map        file descriptor: input1/input2/label paths, geojson_path, label-json payload
 * @param datasetUid master dataset id the object rows belong to
 * @param subDir     split name: "train", "val", or anything else (stored as test)
 */
@Transactional
public void insertTrainTestData(Map<String, Object> map, Long datasetUid, String subDir) {
    String comparePath = (String) map.get("input1");
    String targetPath = (String) map.get("input2");
    String labelPath = (String) map.get("label");
    String geojsonPath = (String) map.get("geojson_path");
    Object labelJson = map.get("label-json");

    // FIX: fail with a clear message instead of an NPE-wrapped error when the label
    // json entry is absent from the descriptor map.
    if (labelJson == null) {
        throw new RuntimeException("label_json parse error: 'label-json' entry is missing");
    }
    JsonNode json;
    try {
        json = (labelJson instanceof JsonNode jn) ? jn : mapper.readTree(labelJson.toString());
    } catch (Exception e) {
        throw new RuntimeException("label_json parse error", e);
    }
    // Older Jackson versions return null from readTree("") — preserve the original skip.
    if (json == null) {
        return;
    }

    // File name convention (assumed): <prefix>_<compareYyyy>_<targetYyyy>_<mapSheetNum>[_...]
    String fileName = Paths.get(comparePath).getFileName().toString();
    String[] fileNameStr = fileName.split("_");
    // FIX: guard malformed names instead of an opaque ArrayIndexOutOfBoundsException.
    if (fileNameStr.length < 4) {
        throw new RuntimeException("unexpected dataset file name: " + fileName);
    }
    String compareYyyy = fileNameStr[1];
    String targetYyyy = fileNameStr[2];
    String mapSheetNum = fileNameStr[3];

    int batchSize = 500;
    int i = 0;
    // path("features") never returns Java null (it yields a missing node), so iterating it
    // directly is safe; an empty/missing node simply produces zero iterations.
    for (JsonNode feature : json.path("features")) {
        JsonNode prop = feature.path("properties");
        String compareClassCd = prop.path("before").asText(null);
        String targetClassCd = prop.path("after").asText(null);

        // Re-wrap the single feature as a one-element FeatureCollection for storage.
        ObjectNode root = mapper.createObjectNode();
        root.put("type", "FeatureCollection");
        ArrayNode features = mapper.createArrayNode();
        features.add(feature);
        root.set("features", features);

        DatasetObjRegDto objRegDto =
            DatasetObjRegDto.builder()
                .datasetUid(datasetUid)
                .compareYyyy(Integer.parseInt(compareYyyy))
                .compareClassCd(compareClassCd)
                .targetYyyy(Integer.parseInt(targetYyyy))
                .targetClassCd(targetClassCd)
                .comparePath(comparePath)
                .targetPath(targetPath)
                .labelPath(labelPath)
                .mapSheetNum(mapSheetNum)
                .geojson(root)
                .geojsonPath(geojsonPath)
                .fileName(fileName)
                .build();

        // Route to the split-specific insert; unknown splits fall back to the test table,
        // matching the original else-branch behavior.
        switch (subDir) {
            case "train" -> datasetCoreService.insertDatasetObj(objRegDto);
            case "val" -> datasetCoreService.insertDatasetValObj(objRegDto);
            default -> datasetCoreService.insertDatasetTestObj(objRegDto);
        }

        // Periodically flush/clear the persistence context to cap memory on large files.
        if (++i % batchSize == 0) {
            em.flush();
            em.clear();
        }
    }
}
} }

View File

@@ -15,7 +15,6 @@ import com.kamco.cd.training.postgres.entity.DatasetEntity;
import com.kamco.cd.training.postgres.entity.DatasetObjEntity; import com.kamco.cd.training.postgres.entity.DatasetObjEntity;
import com.kamco.cd.training.postgres.repository.dataset.DatasetObjRepository; import com.kamco.cd.training.postgres.repository.dataset.DatasetObjRepository;
import com.kamco.cd.training.postgres.repository.dataset.DatasetRepository; import com.kamco.cd.training.postgres.repository.dataset.DatasetRepository;
import jakarta.transaction.Transactional;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
@@ -24,6 +23,8 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronizationManager;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -230,10 +231,12 @@ public class DatasetCoreService
return datasetObjRepository.getFilePathByUUIDPathType(uuid, pathType); return datasetObjRepository.getFilePathByUUIDPathType(uuid, pathType);
} }
@Transactional
public void insertDatasetTestObj(DatasetObjRegDto objRegDto) { public void insertDatasetTestObj(DatasetObjRegDto objRegDto) {
datasetObjRepository.insertDatasetTestObj(objRegDto); datasetObjRepository.insertDatasetTestObj(objRegDto);
} }
@Transactional
public void updateDatasetUploadStatus(Long datasetUid) { public void updateDatasetUploadStatus(Long datasetUid) {
DatasetEntity entity = DatasetEntity entity =
datasetRepository datasetRepository
@@ -243,6 +246,7 @@ public class DatasetCoreService
entity.setStatus(LearnDataRegister.COMPLETED.getId()); entity.setStatus(LearnDataRegister.COMPLETED.getId());
} }
@Transactional
public void insertDatasetValObj(DatasetObjRegDto objRegDto) { public void insertDatasetValObj(DatasetObjRegDto objRegDto) {
datasetObjRepository.insertDatasetValObj(objRegDto); datasetObjRepository.insertDatasetValObj(objRegDto);
} }