추론 실행 영구제외 조건 추가

추론 결과 UID 앞 8자리 추가
종료->완료, 종료->강제종료 상태 변경
This commit is contained in:
2026-01-21 19:05:03 +09:00
parent e70307b753
commit c70abbdb6d
11 changed files with 159 additions and 29 deletions

View File

@@ -558,11 +558,11 @@ public class InferenceResultService {
externalHttpClient.call(url, HttpMethod.DELETE, dto, headers, String.class);
if (!result.success()) {
log.warn("Failed to delete inference result");
throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
}
SaveInferenceAiDto request = new SaveInferenceAiDto();
request.setStatus(Status.END.getId());
request.setStatus(Status.FORCED_END.getId());
request.setUuid(dto.getUuid());
request.setUpdateUid(userUtil.getId());
request.setInferEndDttm(ZonedDateTime.now());

View File

@@ -1,7 +1,12 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -13,13 +18,49 @@ import org.springframework.transaction.annotation.Transactional;
public class InferenceResultShpService {
    private final InferenceResultShpCoreService coreService;
    private final InferenceResultCoreService inferenceResultCoreService;
    private final ShpPipelineService shpPipelineService;

    @Value("${mapsheet.shp.baseurl}")
    private String baseDir;

    @Value("${inference.jar-path}")
    private String jarPath;

    @Value("${file.dataset-dir}")
    private String datasetDir;

    /**
     * Refreshes the analysis-result and geometry tables from the {@code inference_results}
     * table for the given inference id.
     *
     * @param id inference_results row id to rebuild from
     * @return counts of the rows produced by the rebuild
     */
    @Transactional
    public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(Long id) {
        return coreService.buildInferenceData(id);
    }

    /**
     * Kicks off asynchronous SHP file generation for one inference run that spans three
     * model batches (M1/M2/M3).
     *
     * <p>Looks up the results for the three batch ids, builds a quoted, comma-separated
     * list of their non-null map ids (e.g. {@code "A1","B2"}), and hands everything to the
     * pipeline service. The pipeline runs asynchronously; this method returns immediately.
     *
     * @param uid       inference run identifier, forwarded to the pipeline as-is
     * @param m1BatchId batch id of model 1
     * @param m2BatchId batch id of model 2
     * @param m3BatchId batch id of model 3
     */
    public void createShp(String uid, Long m1BatchId, Long m2BatchId, Long m3BatchId) {
        List<Long> batchIds = new ArrayList<>();
        batchIds.add(m1BatchId);
        batchIds.add(m2BatchId);
        batchIds.add(m3BatchId);

        List<InferenceResultsTestingDto.ShpDto> resultList =
            inferenceResultCoreService.getInferenceResults(batchIds);

        // Quote each non-null map id; String.join supplies the comma separators,
        // replacing the manual StringBuilder separator bookkeeping.
        List<String> quotedMapIds = new ArrayList<>();
        for (InferenceResultsTestingDto.ShpDto dto : resultList) {
            if (dto.getMapId() != null) {
                quotedMapIds.add("\"" + dto.getMapId() + "\"");
            }
        }
        String mapIds = String.join(",", quotedMapIds);

        String batchId = m1BatchId + "," + m2BatchId + "," + m3BatchId;
        // shp file generation is asynchronous; uid is passed through unchanged
        // (the original's `inferenceId` local was a dead store reassigned to uid).
        shpPipelineService.runPipeline(jarPath, datasetDir, batchId, uid, mapIds);
    }
}