diff --git a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java
index 209205de..be9483a9 100644
--- a/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/inference/service/InferenceResultService.java
@@ -96,7 +96,7 @@ public class InferenceResultService {

       // Look up the base year
       List targetList = mapSheetMngCoreService.getHstMapSheetList(req.getTargetYyyy());
-      req.setMapSheetNum(this.createdMngDto(req, targetList));
+      req.setMapSheetNum(createdMngDto(req, targetList));
     } else {
       // Partial
@@ -107,7 +107,7 @@ public class InferenceResultService {

       // Look up the base year
       List targetList = mapSheetMngCoreService.getHstMapSheetList(req.getTargetYyyy(), mapTargetIds);
-      req.setMapSheetNum(this.createdMngDto(req, targetList));
+      req.setMapSheetNum(createdMngDto(req, targetList));
     }

     if (req.getMapSheetNum().isEmpty()) {
@@ -116,7 +116,9 @@ public class InferenceResultService {

     // Save to the inference table
     UUID uuid = inferenceResultCoreService.saveInferenceInfo(req);
-    this.startInference(req, uuid);
+
+    // Call the inference execution API
+    startInference(req, uuid);

     return uuid;
   }
@@ -198,9 +200,9 @@ public class InferenceResultService {
     }

     String modelComparePath =
-        this.getSceneInference(String.valueOf(req.getCompareYyyy()), mapSheetNumList);
+        getSceneInference(String.valueOf(req.getCompareYyyy()), mapSheetNumList);
     String modelTargetPath =
-        this.getSceneInference(String.valueOf(req.getTargetYyyy()), mapSheetNumList);
+        getSceneInference(String.valueOf(req.getTargetYyyy()), mapSheetNumList);

     pred_requests_areas predRequestsAreas = new pred_requests_areas();
     predRequestsAreas.setInput1_year(req.getCompareYyyy());
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java
index f4515641..e2b5bdf3 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/core/InferenceResultCoreService.java
@@ -104,13 +104,13 @@ public class InferenceResultCoreService {
       buffer.add(e);

       if (buffer.size() == CHUNK) {
-        this.flushChunk(buffer);
+        flushChunk(buffer);
         buffer.clear();
       }
     }

     if (!buffer.isEmpty()) {
-      this.flushChunk(buffer);
+      flushChunk(buffer);
       buffer.clear();
     }
diff --git a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetLearnRepositoryImpl.java b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetLearnRepositoryImpl.java
index 4b04fbcc..7ebc5e54 100644
--- a/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetLearnRepositoryImpl.java
+++ b/src/main/java/com/kamco/cd/kamcoback/postgres/repository/Inference/MapSheetLearnRepositoryImpl.java
@@ -144,8 +144,9 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
     return Optional.ofNullable(
         queryFactory
             .selectFrom(mapSheetLearnEntity)
-            .where(mapSheetLearnEntity.status.eq(status))
-            .limit(1)
+            .where(mapSheetLearnEntity.id.eq(106L))
+            // .where(mapSheetLearnEntity.status.eq(status))
+            // .limit(1)
             .fetchOne());
   }

diff --git a/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobApiController.java b/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobApiController.java
index 5865726a..88474717 100644
--- a/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobApiController.java
+++ b/src/main/java/com/kamco/cd/kamcoback/scheduler/MapSheetMngFileJobApiController.java
@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.scheduler;
 import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
 import com.kamco.cd.kamcoback.code.service.CommonCodeService;
 import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
+import com.kamco.cd.kamcoback.scheduler.service.MapSheetInferenceJobService;
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
 import io.swagger.v3.oas.annotations.media.Schema;
@@ -10,6 +11,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.responses.ApiResponses;
 import io.swagger.v3.oas.annotations.tags.Tag;
 import lombok.RequiredArgsConstructor;
+import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PutMapping;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -23,6 +25,7 @@ public class MapSheetMngFileJobApiController {

   private final CommonCodeService commonCodeService;
   private final MapSheetMngFileJobController mapSheetMngFileJobController;
+  private final MapSheetInferenceJobService mapSheetInferenceJobService;

   @Operation(summary = "영상관리 파일 싱크 스캐쥴러 Start/Stop", description = "영상관리 파일 싱크 스캐쥴러 Start/Stop API")
   @ApiResponses(
@@ -46,4 +49,22 @@ public class MapSheetMngFileJobApiController {

     return ApiResponseDto.createOK("OK");
   }
+
+  @GetMapping("/inference-batch")
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "실행 성공",
+            content =
+                @Content(
+                    mediaType = "application/json",
+                    schema = @Schema(implementation = String.class))),
+        @ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
+        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
+      })
+  public ApiResponseDto inferenceRunBatch() {
+    // mapSheetInferenceJobService.runBatch();
+    return ApiResponseDto.createOK("OK");
+  }
 }
diff --git a/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java b/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java
index 93a119d4..36ad1059 100644
--- a/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java
+++ b/src/main/java/com/kamco/cd/kamcoback/scheduler/service/MapSheetInferenceJobService.java
@@ -48,9 +48,7 @@ public class MapSheetInferenceJobService {
   @Value("${inference.url}")
   private String inferenceUrl;

-  /**
-   * Inference progress batch, every 1 minute
-   */
+  /** Inference progress batch, every 1 minute */
   @Scheduled(fixedDelay = 60_000)
   @Transactional
   public void runBatch() {
@@ -61,7 +59,7 @@ public class MapSheetInferenceJobService {

     try {
       InferenceBatchSheet batchSheet =
-              inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
+          inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());

       if (batchSheet == null) {
         return;
@@ -88,7 +86,7 @@ public class MapSheetInferenceJobService {
       String url = batchUrl + "/" + batchId;

       ExternalCallResult result =
-              externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
+          externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);

       int status = result.statusCode();
       if (status < 200 || status >= 300) {
@@ -103,23 +101,23 @@ public class MapSheetInferenceJobService {
       int failedJobs = dto.getFailedJobs();

       // Complete when completed + failed adds up to total
-      String inferStatus = this.setStatus(totalJobs, completedJobs, failedJobs);
+      String inferStatus = setStatus(totalJobs, completedJobs, failedJobs);

       if ("COMPLETED".equals(inferStatus)) {
         String type = batchSheet.getRunningModelType();
         if (type.equals("M1")) {
           // If M1 has completed, start M2
-          this.startInference(
-              batchSheet.getId(), batchSheet.getUuid(), "M2", batchSheet.getM2ModelUuid());
+          startInference(
+              batchSheet.getId(), batchSheet.getUuid(), "M2", batchSheet.getM2ModelUuid());
           // End time
-          this.updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M1");
+          updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M1");
         } else if (type.equals("M2")) {
           // If M2 has completed, start M3
-          this.startInference(
-              batchSheet.getId(), batchSheet.getUuid(), "M3", batchSheet.getM3ModelUuid());
+          startInference(
+              batchSheet.getId(), batchSheet.getUuid(), "M3", batchSheet.getM3ModelUuid());
           // End time
-          this.updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M2");
+          updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M2");
         } else if (type.equals("M3")) {
           // Completed
           SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
@@ -129,7 +127,7 @@ public class MapSheetInferenceJobService {
           saveInferenceAiDto.setType(type);
           inferenceResultCoreService.update(saveInferenceAiDto);
           // End time
-          this.updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M3");
+          updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M3");
         }
       } else {
         SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
@@ -148,7 +146,7 @@ public class MapSheetInferenceJobService {

   private void startInference(Long id, UUID uuid, String type, UUID modelUuid) {
     InferenceProgressDto progressDto =
-            inferenceResultCoreService.getInferenceAiResultById(id, type, modelUuid);
+        inferenceResultCoreService.getInferenceAiResultById(id, type, modelUuid);

     String inferenceType = "";

@@ -164,24 +162,24 @@ public class MapSheetInferenceJobService {
     predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
     predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year());
     predRequestsAreas.setInput1_scene_path(
-            progressDto.getPred_requests_areas().getInput1_scene_path());
+        progressDto.getPred_requests_areas().getInput1_scene_path());
     predRequestsAreas.setInput2_scene_path(
-            progressDto.getPred_requests_areas().getInput2_scene_path());
+        progressDto.getPred_requests_areas().getInput2_scene_path());

     InferenceSendDto m = new InferenceSendDto();
     m.setPred_requests_areas(predRequestsAreas);
     m.setModel_version(progressDto.getModelVersion());
     m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName());
     m.setCd_model_config(
-            progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
+        progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
     m.setCls_model_path(
-            progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
+        progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
     m.setCls_model_version(progressDto.getClsModelVersion());
     m.setCd_model_type(inferenceType);
     m.setPriority(progressDto.getPriority());

     // Start the next inference model
-    Long batchId = this.ensureAccepted(m);
+    Long batchId = ensureAccepted(m);

     SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
     saveInferenceAiDto.setUuid(uuid);
@@ -217,7 +215,7 @@ public class MapSheetInferenceJobService {
     }

     ExternalCallResult result =
-            externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
+        externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);

     int status = result.statusCode();
     String body = result.body();
@@ -230,8 +228,7 @@ public class MapSheetInferenceJobService {

     try {
       List<Map<String, Object>> list =
-          om.readValue(body, new TypeReference<List<Map<String, Object>>>() {
-          });
+          om.readValue(body, new TypeReference<List<Map<String, Object>>>() {});

       Integer batchIdInt = (Integer) list.get(0).get("batch_id");
       batchId = batchIdInt.longValue();
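
The InferenceResultCoreService hunk keeps the existing chunked save path: entities are buffered, flushed whenever the buffer reaches CHUNK, and the trailing partial chunk is flushed after the loop. Below is a standalone sketch of that pattern; the chunk size and the Consumer-based callback are placeholders for illustration, since the real flushChunk and CHUNK are not part of this diff.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    final class ChunkedFlushSketch {
      private static final int CHUNK = 1_000; // assumed size; the real CHUNK constant is not shown in the diff

      static <T> void saveInChunks(Iterable<T> items, Consumer<List<T>> flushChunk) {
        List<T> buffer = new ArrayList<>(CHUNK);
        for (T e : items) {
          buffer.add(e);
          if (buffer.size() == CHUNK) { // full chunk: write it out, then reuse the buffer
            flushChunk.accept(buffer);
            buffer.clear();
          }
        }
        if (!buffer.isEmpty()) { // trailing partial chunk
          flushChunk.accept(buffer);
          buffer.clear();
        }
      }
    }

As in the patched service, the buffer is handed to the flush callback and cleared immediately, so the callback is expected to write the rows out before returning rather than keep a reference to the list.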
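In MapSheetInferenceJobService, runBatch() polls the external batch status, treats a model run as complete once completed plus failed jobs add up to the total, and then starts the next model in the M1 -> M2 -> M3 chain, with M3 marking the whole inference finished. The sketch below restates that decision logic in isolation; the class and method names are illustrative, and the "IN_PROGRESS" return value is an assumption, since the patch only ever checks for "COMPLETED".

    // Illustrative only: the real setStatus(...) and model-chaining code live in
    // MapSheetInferenceJobService and are not fully shown in this diff.
    final class InferenceFlowSketch {

      // A model run is finished once every job has either completed or failed.
      static String status(int totalJobs, int completedJobs, int failedJobs) {
        return totalJobs > 0 && completedJobs + failedJobs == totalJobs ? "COMPLETED" : "IN_PROGRESS";
      }

      // After a model finishes, the scheduler starts the next one; M3 ends the chain.
      static String nextModel(String runningModelType) {
        switch (runningModelType) {
          case "M1":
            return "M2";
          case "M2":
            return "M3";
          default:
            return null; // M3 (or anything else): nothing left to start
        }
      }
    }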
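ensureAccepted() posts the request and then pulls batch_id out of the first element of the JSON array in the response body. The isolated parsing step looks roughly like the sketch below; the response shape is inferred from the diff rather than from the inference API's documentation, and the Number cast is a slightly more defensive variant of the Integer cast used in the patch.

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;
    import java.util.Map;

    // Standalone sketch of the batch_id extraction; names here are illustrative.
    final class BatchIdParserSketch {
      private static final ObjectMapper OM = new ObjectMapper();

      static Long parseBatchId(String body) throws Exception {
        List<Map<String, Object>> list =
            OM.readValue(body, new TypeReference<List<Map<String, Object>>>() {});
        // Jackson maps small JSON integers to Integer, so widen explicitly to long.
        Number batchId = (Number) list.get(0).get("batch_id");
        return batchId.longValue();
      }

      public static void main(String[] args) throws Exception {
        System.out.println(parseBatchId("[{\"batch_id\": 42}]")); // prints 42
      }
    }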