[KC-103] 추론 실행 배치 수정

This commit is contained in:
2026-01-14 15:24:14 +09:00
parent 326591c4bd
commit 4d52e2a97a
5 changed files with 499 additions and 308 deletions

View File

@@ -23,7 +23,9 @@ import org.springframework.data.domain.Pageable;
public class InferenceResultDto { public class InferenceResultDto {
/** 목록조회 dto */ /**
* 목록조회 dto
*/
@Getter @Getter
@Setter @Setter
@AllArgsConstructor @AllArgsConstructor
@@ -35,11 +37,15 @@ public class InferenceResultDto {
private String status; private String status;
private String mapSheetCnt; private String mapSheetCnt;
private Long detectingCnt; private Long detectingCnt;
@JsonFormatDttm private ZonedDateTime startTime; @JsonFormatDttm
@JsonFormatDttm private ZonedDateTime endTime; private ZonedDateTime startTime;
@JsonFormatDttm private ZonedDateTime elapsedTime; @JsonFormatDttm
private ZonedDateTime endTime;
@JsonFormatDttm
private ZonedDateTime elapsedTime;
private Boolean applyYn; private Boolean applyYn;
@JsonFormatDttm private ZonedDateTime applyDttm; @JsonFormatDttm
private ZonedDateTime applyDttm;
@JsonProperty("statusName") @JsonProperty("statusName")
public String statusName() { public String statusName() {
@@ -47,7 +53,9 @@ public class InferenceResultDto {
} }
} }
/** 목록조회 검색 조건 dto */ /**
* 목록조회 검색 조건 dto
*/
@Getter @Getter
@Setter @Setter
@NoArgsConstructor @NoArgsConstructor
@@ -69,7 +77,9 @@ public class InferenceResultDto {
} }
} }
/** 탐지 데이터 옵션 dto */ /**
* 탐지 데이터 옵션 dto
*/
@Getter @Getter
@AllArgsConstructor @AllArgsConstructor
public enum MapSheetScope implements EnumType { public enum MapSheetScope implements EnumType {
@@ -90,7 +100,9 @@ public class InferenceResultDto {
} }
} }
/** 분석대상 도엽 enum */ /**
* 분석대상 도엽 enum
*/
@Getter @Getter
@AllArgsConstructor @AllArgsConstructor
public enum DetectOption implements EnumType { public enum DetectOption implements EnumType {
@@ -138,7 +150,9 @@ public class InferenceResultDto {
} }
} }
/** 변화탐지 실행 정보 저장 요청 정보 */ /**
* 변화탐지 실행 정보 저장 요청 정보
*/
@Getter @Getter
@Setter @Setter
@NoArgsConstructor @NoArgsConstructor
@@ -177,8 +191,8 @@ public class InferenceResultDto {
@Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL") @Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
@NotBlank @NotBlank
@EnumValid( @EnumValid(
enumClass = DetectOption.class, enumClass = DetectOption.class,
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.") message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
private String detectOption; private String detectOption;
@Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]") @Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
@@ -205,41 +219,49 @@ public class InferenceResultDto {
private Integer targetYyyy; private Integer targetYyyy;
private String detectOption; private String detectOption;
private String mapSheetScope; private String mapSheetScope;
@JsonFormatDttm private ZonedDateTime inferStartDttm; @JsonFormatDttm
@JsonFormatDttm private ZonedDateTime inferEndDttm; private ZonedDateTime inferStartDttm;
@JsonFormatDttm
private ZonedDateTime inferEndDttm;
private Long detectingCnt = 0L; private Long detectingCnt = 0L;
private Long detectingEndCnt = 0L; private Long detectingEndCnt = 0L;
@JsonFormatDttm private ZonedDateTime m1ModelStartDttm; @JsonFormatDttm
@JsonFormatDttm private ZonedDateTime m1ModelEndDttm; private ZonedDateTime m1ModelStartDttm;
@JsonFormatDttm private ZonedDateTime m2ModelStartDttm; @JsonFormatDttm
@JsonFormatDttm private ZonedDateTime m2ModelEndDttm; private ZonedDateTime m1ModelEndDttm;
@JsonFormatDttm private ZonedDateTime m3ModelStartDttm; @JsonFormatDttm
@JsonFormatDttm private ZonedDateTime m3ModelEndDttm; private ZonedDateTime m2ModelStartDttm;
@JsonFormatDttm
private ZonedDateTime m2ModelEndDttm;
@JsonFormatDttm
private ZonedDateTime m3ModelStartDttm;
@JsonFormatDttm
private ZonedDateTime m3ModelEndDttm;
private String model1Ver; private String model1Ver;
private String model2Ver; private String model2Ver;
private String model3Ver; private String model3Ver;
public InferenceStatusDetailDto( public InferenceStatusDetailDto(
String title, String title,
Integer compareYyyy, Integer compareYyyy,
Integer targetYyyy, Integer targetYyyy,
String detectOption, String detectOption,
String mapSheetScope, String mapSheetScope,
ZonedDateTime inferStartDttm, ZonedDateTime inferStartDttm,
ZonedDateTime inferEndDttm, ZonedDateTime inferEndDttm,
Long detectingCnt, Long detectingCnt,
Long detectingEndCnt, Long detectingEndCnt,
ZonedDateTime m1ModelStartDttm, ZonedDateTime m1ModelStartDttm,
ZonedDateTime m1ModelEndDttm, ZonedDateTime m1ModelEndDttm,
ZonedDateTime m2ModelStartDttm, ZonedDateTime m2ModelStartDttm,
ZonedDateTime m2ModelEndDttm, ZonedDateTime m2ModelEndDttm,
ZonedDateTime m3ModelStartDttm, ZonedDateTime m3ModelStartDttm,
ZonedDateTime m3ModelEndDttm, ZonedDateTime m3ModelEndDttm,
String model1Ver, String model1Ver,
String model2Ver, String model2Ver,
String model3Ver) { String model3Ver) {
this.title = title; this.title = title;
this.compareYyyy = compareYyyy; this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy; this.targetYyyy = targetYyyy;
@@ -342,9 +364,12 @@ public class InferenceResultDto {
public static class InferenceServerStatusDto { public static class InferenceServerStatusDto {
private String serverName; private String serverName;
@JsonIgnore private float cpu_user; @JsonIgnore
@JsonIgnore private float cpu_system; private float cpu_user;
@JsonIgnore private float memused; @JsonIgnore
private float cpu_system;
@JsonIgnore
private float memused;
private Long kbmemused; private Long kbmemused;
private float gpuUtil; private float gpuUtil;
@@ -458,5 +483,9 @@ public class InferenceResultDto {
private ZonedDateTime modelEndDttm; private ZonedDateTime modelEndDttm;
private Long updateUid; private Long updateUid;
private String runningModelType; private String runningModelType;
private Integer pendingJobs;
private Integer runningJobs;
private Integer completedJobs;
private Integer failedJobs;
} }
} }

View File

@@ -106,7 +106,7 @@ public class InferenceResultService {
// 변화탐지 실행 가능 비교년도 조회 // 변화탐지 실행 가능 비교년도 조회
List<MngListCompareDto> compareList = List<MngListCompareDto> compareList =
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds); mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
if (compareList.isEmpty()) { if (compareList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND); throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
@@ -140,6 +140,7 @@ public class InferenceResultService {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND); throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
} }
// 사용할 영상파일 년도 기록 및 추론에 포함되는지 설정
for (MngListDto target : targetList) { for (MngListDto target : targetList) {
for (Map<String, Object> map : totalNumList) { for (Map<String, Object> map : totalNumList) {
if (target.getMapSheetNum().equals(map.get("mapSheetNum").toString())) { if (target.getMapSheetNum().equals(map.get("mapSheetNum").toString())) {
@@ -154,20 +155,20 @@ public class InferenceResultService {
// 추론에 필요한 geojson 파일 생성 // 추론에 필요한 geojson 파일 생성
List<String> mapSheetNumList = List<String> mapSheetNumList =
targetList.stream() targetList.stream()
.filter(t -> Boolean.TRUE.equals(t.getIsSuccess())) .filter(t -> Boolean.TRUE.equals(t.getIsSuccess()))
.map(MngListDto::getMapSheetNum) .map(MngListDto::getMapSheetNum)
.toList(); .toList();
// 비교년도 geojson 파일 생성하여 경로 받기 // 비교년도 geojson 파일 생성하여 경로 받기
String modelComparePath = String modelComparePath =
getSceneInference( getSceneInference(
String.valueOf(req.getCompareYyyy()), mapSheetNumList, req.getMapSheetScope()); String.valueOf(req.getCompareYyyy()), mapSheetNumList, req.getMapSheetScope());
// 기준년도 geojson 파일 생성하여 경로 받기 // 기준년도 geojson 파일 생성하여 경로 받기
String modelTargetPath = String modelTargetPath =
getSceneInference( getSceneInference(
String.valueOf(req.getTargetYyyy()), mapSheetNumList, req.getMapSheetScope()); String.valueOf(req.getTargetYyyy()), mapSheetNumList, req.getMapSheetScope());
// ai 서버에 전달할 파라미터 생성 // ai 서버에 전달할 파라미터 생성
pred_requests_areas predRequestsAreas = new pred_requests_areas(); pred_requests_areas predRequestsAreas = new pred_requests_areas();
@@ -199,19 +200,19 @@ public class InferenceResultService {
// 비교년도 탐지 제이터 옵션 별로 조회하여 req에 적용 // 비교년도 탐지 제이터 옵션 별로 조회하여 req에 적용
private List<MapSheetNumDto> createdMngDto( private List<MapSheetNumDto> createdMngDto(
InferenceResultDto.RegReq req, List<MngListDto> targetList) { InferenceResultDto.RegReq req, List<MngListDto> targetList) {
List<String> mapTargetIds = new ArrayList<>(); List<String> mapTargetIds = new ArrayList<>();
targetList.forEach( targetList.forEach(
hstMapSheet -> { hstMapSheet -> {
// 비교년도는 target 년도 기준으로 가져옴 파라미터 만들기 // 비교년도는 target 년도 기준으로 가져옴 파라미터 만들기
mapTargetIds.add(hstMapSheet.getMapSheetNum()); mapTargetIds.add(hstMapSheet.getMapSheetNum());
}); });
// 비교년도 조회 // 비교년도 조회
List<String> mapCompareIds = new ArrayList<>(); List<String> mapCompareIds = new ArrayList<>();
List<MngListCompareDto> compareList = List<MngListCompareDto> compareList =
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds); mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
for (MngListCompareDto dto : compareList) { for (MngListCompareDto dto : compareList) {
// 추론 제외일때 이전년도 파일이 없으면 제외 // 추론 제외일때 이전년도 파일이 없으면 제외
@@ -226,35 +227,35 @@ public class InferenceResultService {
} }
Set<String> compareSet = Set<String> compareSet =
mapCompareIds.stream() mapCompareIds.stream()
.filter(Objects::nonNull) .filter(Objects::nonNull)
.map(String::trim) // 공백/개행 방지 .map(String::trim) // 공백/개행 방지
.collect(Collectors.toSet()); .collect(Collectors.toSet());
// target 기준 compare 비교하여 서로 있는것만 저장 // target 기준 compare 비교하여 서로 있는것만 저장
List<String> commonIds = List<String> commonIds =
mapTargetIds.stream() mapTargetIds.stream()
.filter(Objects::nonNull) .filter(Objects::nonNull)
.map(String::trim) .map(String::trim)
.filter(compareSet::contains) .filter(compareSet::contains)
.toList(); .toList();
Set<String> commonIdSet = Set<String> commonIdSet =
commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet()); commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet());
// 저장하기위해 파라미터 다시 구성 // 저장하기위해 파라미터 다시 구성
List<MapSheetNumDto> mapSheetNum = List<MapSheetNumDto> mapSheetNum =
targetList.stream() targetList.stream()
.filter(dto -> dto.getMapSheetNum() != null) .filter(dto -> dto.getMapSheetNum() != null)
.filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim())) .filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
.map( .map(
dto -> { dto -> {
MapSheetNumDto mapSheetNumDto = new MapSheetNumDto(); MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum()); mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
mapSheetNumDto.setMapSheetName(dto.getMapSheetName()); mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
return mapSheetNumDto; return mapSheetNumDto;
}) })
.toList(); .toList();
return mapSheetNum; return mapSheetNum;
} }
@@ -271,14 +272,14 @@ public class InferenceResultService {
throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST); throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
} }
// 1) 요청 로그 (debug 권장) // 1) 요청 로그
try { try {
log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto)); log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
log.warn("Failed to serialize inference dto", e); log.warn("Failed to serialize inference dto", e);
} }
// 2) local 환경 임시 처리 (NPE 방어) // 2) local 환경 임시 처리
if ("local".equals(profile)) { if ("local".equals(profile)) {
if (dto.getPred_requests_areas() == null) { if (dto.getPred_requests_areas() == null) {
throw new IllegalStateException("pred_requests_areas is null"); throw new IllegalStateException("pred_requests_areas is null");
@@ -293,7 +294,7 @@ public class InferenceResultService {
headers.setAccept(List.of(MediaType.APPLICATION_JSON)); headers.setAccept(List.of(MediaType.APPLICATION_JSON));
ExternalCallResult<String> result = ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class); externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
if (result.statusCode() < 200 || result.statusCode() >= 300) { if (result.statusCode() < 200 || result.statusCode() >= 300) {
log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body()); log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
@@ -303,7 +304,8 @@ public class InferenceResultService {
// 4) 응답 파싱 // 4) 응답 파싱
try { try {
List<Map<String, Object>> list = List<Map<String, Object>> list =
objectMapper.readValue(result.body(), new TypeReference<>() {}); objectMapper.readValue(result.body(), new TypeReference<>() {
});
if (list.isEmpty()) { if (list.isEmpty()) {
throw new IllegalStateException("Inference response is empty"); throw new IllegalStateException("Inference response is empty");
@@ -372,8 +374,8 @@ public class InferenceResultService {
/** /**
* geojson 파일 생성 * geojson 파일 생성
* *
* @param yyyy 영상관리 파일별 년도 * @param yyyy 영상관리 파일별 년도
* @param mapSheetNums 5k 도엽 번호 리스트 * @param mapSheetNums 5k 도엽 번호 리스트
* @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용 * @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용
* @return * @return
*/ */
@@ -408,7 +410,7 @@ public class InferenceResultService {
* @return * @return
*/ */
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList( public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) { Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceResultGeomList(id, searchGeoReq); return inferenceResultCoreService.getInferenceResultGeomList(id, searchGeoReq);
} }
@@ -419,7 +421,7 @@ public class InferenceResultService {
* @return * @return
*/ */
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom( public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) { @NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) {
return inferenceResultCoreService.listInferenceResultWithGeom(id, searchReq); return inferenceResultCoreService.listInferenceResultWithGeom(id, searchReq);
} }
@@ -464,7 +466,7 @@ public class InferenceResultService {
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) { public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
List<InferenceServerStatusDto> servers = List<InferenceServerStatusDto> servers =
inferenceResultCoreService.getInferenceServerStatusList(); inferenceResultCoreService.getInferenceServerStatusList();
String serverNames = ""; String serverNames = "";
for (InferenceServerStatusDto server : servers) { for (InferenceServerStatusDto server : servers) {

View File

@@ -29,6 +29,7 @@ import java.time.ZonedDateTime;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import java.util.function.Consumer;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2; import lombok.extern.log4j.Log4j2;
import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessException;
@@ -101,7 +102,7 @@ public class InferenceResultCoreService {
mapSheetLearnEntity.setMapSheetCnt(mapSheetName); mapSheetLearnEntity.setMapSheetCnt(mapSheetName);
mapSheetLearnEntity.setDetectingCnt((long) detectingCnt); mapSheetLearnEntity.setDetectingCnt((long) detectingCnt);
mapSheetLearnEntity.setStage( mapSheetLearnEntity.setStage(
mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy())); mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy()));
// learn 테이블 저장 // learn 테이블 저장
MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity); MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity);
@@ -163,9 +164,9 @@ public class InferenceResultCoreService {
*/ */
public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) { public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceDetailDto.AnalResSummary summary = InferenceDetailDto.AnalResSummary summary =
mapSheetAnalDataRepository mapSheetAnalDataRepository
.getInferenceResultSummary(id) .getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id)); .orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
return summary; return summary;
} }
@@ -186,7 +187,7 @@ public class InferenceResultCoreService {
* @return * @return
*/ */
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList( public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) { Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq); return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
} }
@@ -198,16 +199,16 @@ public class InferenceResultCoreService {
*/ */
@Transactional(readOnly = true) @Transactional(readOnly = true)
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom( public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) { @NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
// 분석 ID 에 해당하는 dataids를 가져온다. // 분석 ID 에 해당하는 dataids를 가져온다.
List<Long> dataIds = List<Long> dataIds =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream() mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getId) .mapToLong(MapSheetAnalDataInferenceEntity::getId)
.boxed() .boxed()
.toList(); .toList();
// 해당데이터의 폴리곤데이터를 가져온다 // 해당데이터의 폴리곤데이터를 가져온다
Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities = Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq); mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity); return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
} }
@@ -224,108 +225,112 @@ public class InferenceResultCoreService {
@Transactional(readOnly = true) @Transactional(readOnly = true)
public List<MapSheet> listGetScenes5k(Long analyId) { public List<MapSheet> listGetScenes5k(Long analyId) {
List<String> sceneCodes = List<String> sceneCodes =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream() mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum) .mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
.mapToObj(String::valueOf) .mapToObj(String::valueOf)
.toList(); .toList();
return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream() return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
.map(MapInkx5kEntity::toEntity) .map(MapInkx5kEntity::toEntity)
.toList(); .toList();
} }
/**
* learn 테이블 update
*
* @param request 추론 실행 정보
*/
public void update(SaveInferenceAiDto request) { public void update(SaveInferenceAiDto request) {
MapSheetLearnEntity entity = MapSheetLearnEntity entity =
mapSheetLearnRepository mapSheetLearnRepository.getInferenceResultByUuid(request.getUuid())
.getInferenceResultByUuid(request.getUuid()) .orElseThrow(EntityNotFoundException::new);
.orElseThrow(() -> new EntityNotFoundException());
// M1/M2/M3 영역 업데이트
if (request.getType() != null) { if (request.getType() != null) {
switch (request.getType()) { applyModelUpdate(entity, request);
case "M1" -> {
if (request.getBatchId() != null) {
entity.setM1ModelBatchId(request.getBatchId());
}
if (request.getModelStartDttm() != null) {
entity.setM1ModelStartDttm(request.getModelStartDttm());
}
if (request.getModelEndDttm() != null) {
entity.setM1ModelEndDttm(request.getModelEndDttm());
}
}
case "M2" -> {
if (request.getBatchId() != null) {
entity.setM2ModelBatchId(request.getBatchId());
}
if (request.getModelStartDttm() != null) {
entity.setM2ModelStartDttm(request.getModelStartDttm());
}
if (request.getModelEndDttm() != null) {
entity.setM2ModelEndDttm(request.getModelEndDttm());
}
}
case "M3" -> {
if (request.getBatchId() != null) {
entity.setM3ModelBatchId(request.getBatchId());
}
if (request.getModelStartDttm() != null) {
entity.setM3ModelStartDttm(request.getModelStartDttm());
}
if (request.getModelEndDttm() != null) {
entity.setM3ModelEndDttm(request.getModelEndDttm());
}
}
}
} }
if (request.getRunningModelType() != null) { // 공통 영역 업데이트
entity.setRunningModelType(request.getRunningModelType()); applyIfNotNull(request.getRunningModelType(), entity::setRunningModelType);
} applyIfNotNull(request.getInferStartDttm(), entity::setInferStartDttm);
applyIfNotNull(request.getInferEndDttm(), entity::setInferEndDttm);
if (request.getInferStartDttm() != null) { applyIfNotNull(request.getModelComparePath(), entity::setModelComparePath);
entity.setInferStartDttm(request.getInferStartDttm()); applyIfNotNull(request.getModelTargetPath(), entity::setModelTargetPath);
} applyIfNotNull(request.getDetectEndCnt(), entity::setDetectEndCnt);
applyIfNotNull(request.getStatus(), entity::setStatus);
if (request.getInferEndDttm() != null) { applyIfNotNull(request.getUpdateUid(), entity::setUpdatedUid);
entity.setInferEndDttm(request.getInferEndDttm());
}
if (request.getModelComparePath() != null) {
entity.setModelComparePath(request.getModelComparePath());
}
if (request.getModelTargetPath() != null) {
entity.setModelTargetPath(request.getModelTargetPath());
}
if (request.getDetectEndCnt() != null) {
entity.setDetectEndCnt(request.getDetectEndCnt());
}
if (request.getStatus() != null) {
entity.setStatus(request.getStatus());
}
if (request.getUpdateUid() != null) {
entity.setUpdatedUid(request.getUpdateUid());
}
entity.setUpdatedDttm(ZonedDateTime.now()); entity.setUpdatedDttm(ZonedDateTime.now());
} }
private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) {
switch (request.getType()) {
case "M1" -> applyModelFields(
request,
entity::setM1ModelBatchId,
entity::setM1ModelStartDttm,
entity::setM1ModelEndDttm,
entity::setM1PendingJobs,
entity::setM1RunningJobs,
entity::setM1CompletedJobs,
entity::setM1FailedJobs
);
case "M2" -> applyModelFields(
request,
entity::setM2ModelBatchId,
entity::setM2ModelStartDttm,
entity::setM2ModelEndDttm,
entity::setM2PendingJobs,
entity::setM2RunningJobs,
entity::setM2CompletedJobs,
entity::setM2FailedJobs
);
case "M3" -> applyModelFields(
request,
entity::setM3ModelBatchId,
entity::setM3ModelStartDttm,
entity::setM3ModelEndDttm,
entity::setM3PendingJobs,
entity::setM3RunningJobs,
entity::setM3CompletedJobs,
entity::setM3FailedJobs
);
default -> throw new IllegalArgumentException("Unknown type: " + request.getType());
}
}
private void applyModelFields(
SaveInferenceAiDto request,
Consumer<Long> setBatchId,
Consumer<ZonedDateTime> setStart,
Consumer<ZonedDateTime> setEnd,
Consumer<Integer> setPending,
Consumer<Integer> setRunning,
Consumer<Integer> setCompleted,
Consumer<Integer> setFailed
) {
applyIfNotNull(request.getBatchId(), setBatchId);
applyIfNotNull(request.getModelStartDttm(), setStart);
applyIfNotNull(request.getModelEndDttm(), setEnd);
applyIfNotNull(request.getPendingJobs(), setPending);
applyIfNotNull(request.getRunningJobs(), setRunning);
applyIfNotNull(request.getCompletedJobs(), setCompleted);
applyIfNotNull(request.getFailedJobs(), setFailed);
}
private <T> void applyIfNotNull(T value, Consumer<T> setter) {
if (value != null) {
setter.accept(value);
}
}
public List<InferenceServerStatusDto> getInferenceServerStatusList() { public List<InferenceServerStatusDto> getInferenceServerStatusList() {
return mapSheetLearnRepository.getInferenceServerStatusList(); return mapSheetLearnRepository.getInferenceServerStatusList();
} }
public InferenceBatchSheet getInferenceResultByStatus(String status) { public InferenceBatchSheet getInferenceResultByStatus(String status) {
MapSheetLearnEntity entity = MapSheetLearnEntity entity =
mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null); mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
if (entity == null) { if (entity == null) {
return null; return null;
@@ -345,7 +350,14 @@ public class InferenceResultCoreService {
return inferenceBatchSheet; return inferenceBatchSheet;
} }
public InferenceProgressDto getInferenceAiResultById(Long id, String type, UUID modelUuid) { /**
* 추론 실행 api 파라미터 조회
*
* @param id
* @param modelUuid
* @return
*/
public InferenceProgressDto getInferenceAiResultById(Long id, UUID modelUuid) {
return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid); return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid);
} }
@@ -364,7 +376,7 @@ public class InferenceResultCoreService {
/** /**
* @param compareYear 비교년도 * @param compareYear 비교년도
* @param targetYear 기준년도 * @param targetYear 기준년도
* @return * @return
*/ */
public Integer getLearnStage(Integer compareYear, Integer targetYear) { public Integer getLearnStage(Integer compareYear, Integer targetYear) {

View File

@@ -34,7 +34,6 @@ import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions; import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression; import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression; import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory; import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException; import jakarta.persistence.EntityNotFoundException;

View File

@@ -11,7 +11,6 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status; import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto; import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto.pred_requests_areas;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService; import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto; import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import jakarta.transaction.Transactional; import jakarta.transaction.Transactional;
@@ -48,137 +47,247 @@ public class MapSheetInferenceJobService {
@Value("${inference.url}") @Value("${inference.url}")
private String inferenceUrl; private String inferenceUrl;
/** 추론 진행 배치 1분 */ /**
* 추론 진행 배치 1분
*/
@Scheduled(fixedDelay = 60_000) @Scheduled(fixedDelay = 60_000)
@Transactional @Transactional
public void runBatch() { public void runBatch() {
if (isLocalProfile()) {
if ("local".equalsIgnoreCase(profile)) {
return; return;
} }
try { try {
InferenceBatchSheet batchSheet = // 진행중 배치 조회
inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId()); InferenceBatchSheet sheet = findInProgressSheet();
if (sheet == null) {
if (batchSheet == null) {
return; return;
} }
HttpHeaders headers = new HttpHeaders(); // 배치 아이디 가져오기
headers.setContentType(MediaType.APPLICATION_JSON); Long batchId = resolveBatchId(sheet);
headers.setAccept(List.of(MediaType.APPLICATION_JSON)); if (batchId == null || batchId == 0L) {
Long batchId = 0L;
if (batchSheet.getM3BatchId() != null) {
batchId = batchSheet.getM3BatchId();
} else if (batchSheet.getM2BatchId() != null) {
batchId = batchSheet.getM2BatchId();
} else if (batchSheet.getM1BatchId() != null) {
batchId = batchSheet.getM1BatchId();
}
if (batchId == 0L) {
return; return;
} }
String url = batchUrl + "/" + batchId; // 추론실행 상태 정보 가져오기
JobStatusDto job = fetchJobStatus(batchId);
ExternalCallResult<String> result = if (job == null) {
externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
int status = result.statusCode();
if (status < 200 || status >= 300) {
return; return;
} }
String json = result.body(); if (isCompleted(job)) {
JobStatusDto dto = objectMapper.readValue(json, JobStatusDto.class); // 완료 처리
onCompleted(sheet, job);
int totalJobs = dto.getTotalJobs();
int completedJobs = dto.getCompletedJobs();
int failedJobs = dto.getFailedJobs();
// 성공, 실패 값 더해서 total 과 같으면 완료
String inferStatus = setStatus(totalJobs, completedJobs, failedJobs);
if ("COMPLETED".equals(inferStatus)) {
String type = batchSheet.getRunningModelType();
if (type.equals("M1")) {
// M1 완료되었으면 M2 실행
startInference(
batchSheet.getId(), batchSheet.getUuid(), "M2", batchSheet.getM2ModelUuid());
// 종료시간
updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M1");
} else if (type.equals("M2")) {
// M2 완료되었으면 M3 실행
startInference(
batchSheet.getId(), batchSheet.getUuid(), "M3", batchSheet.getM3ModelUuid());
// 종료시간
updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M2");
} else if (type.equals("M3")) {
// 완료
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(batchSheet.getUuid());
saveInferenceAiDto.setStatus(Status.END.getId());
saveInferenceAiDto.setInferEndDttm(ZonedDateTime.now());
saveInferenceAiDto.setType(type);
inferenceResultCoreService.update(saveInferenceAiDto);
// 종료시간
updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M3");
}
} else { } else {
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto(); // 진행중 처리
saveInferenceAiDto.setUuid(batchSheet.getUuid()); onProcessing(sheet, job);
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
saveInferenceAiDto.setDetectEndCnt((long) (completedJobs + failedJobs));
inferenceResultCoreService.update(saveInferenceAiDto);
} }
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
Thread.currentThread().interrupt(); // JSON 파싱 오류는 interrupt 대상 아님
log.error("배치 중 인터럽트 발생", e); log.error("배치 중 JSON 파싱 오류", e);
} catch (Exception e) {
log.error("배치 처리 중 예외", e);
} }
} }
private void startInference(Long id, UUID uuid, String type, UUID modelUuid) { /**
* 진행중 배치 조회
*
* @return
*/
private InferenceBatchSheet findInProgressSheet() {
return inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
}
InferenceProgressDto progressDto = /**
inferenceResultCoreService.getInferenceAiResultById(id, type, modelUuid); * batchId 결정
*
* @param sheet
* @return
*/
private Long resolveBatchId(InferenceBatchSheet sheet) {
// M3 > M2 > M1
if (sheet.getM3BatchId() != null) {
return sheet.getM3BatchId();
}
if (sheet.getM2BatchId() != null) {
return sheet.getM2BatchId();
}
if (sheet.getM1BatchId() != null) {
return sheet.getM1BatchId();
}
return 0L;
}
/**
 * Fetch the current job status of an inference batch from the external batch API.
 *
 * @param batchId id of the batch to query
 * @return the parsed {@link JobStatusDto}, or {@code null} on a non-2xx response
 * @throws JsonProcessingException if the response body cannot be deserialized
 */
private JobStatusDto fetchJobStatus(Long batchId) throws JsonProcessingException {
    String url = batchUrl + "/" + batchId;

    ExternalCallResult<String> result =
        externalHttpClient.call(url, HttpMethod.GET, null, jsonHeaders(), String.class);

    int status = result.statusCode();
    if (status < 200 || status >= 300) {
        // Treat any non-success response as "status unavailable" for this poll cycle.
        return null;
    }
    return objectMapper.readValue(result.body(), JobStatusDto.class);
}
/**
 * Build the default HTTP headers for JSON exchanges with the external API.
 *
 * @return headers that send and accept {@code application/json}
 */
private HttpHeaders jsonHeaders() {
    HttpHeaders jsonHeaders = new HttpHeaders();
    jsonHeaders.setAccept(List.of(MediaType.APPLICATION_JSON));
    jsonHeaders.setContentType(MediaType.APPLICATION_JSON);
    return jsonHeaders;
}
/**
 * Decide whether a batch has finished: every job is either completed or failed.
 *
 * @param dto job status snapshot from the inference API
 * @return {@code true} when no jobs remain outstanding
 */
private boolean isCompleted(JobStatusDto dto) {
    var finished = dto.getCompletedJobs() + dto.getFailedJobs();
    return finished >= dto.getTotalJobs();
}
/**
 * Handle a finished batch: close out the model that just ran and either end the
 * whole run (after M3) or start the next model in the M1 -> M2 -> M3 chain.
 *
 * @param sheet the in-progress batch sheet
 * @param job   final job status returned by the inference API
 */
private void onCompleted(InferenceBatchSheet sheet, JobStatusDto job) {
    String finishedModel = sheet.getRunningModelType();
    ZonedDateTime finishedAt = ZonedDateTime.now();

    // Persist the end time (and final job counters) of the model that just finished.
    updateProcessingEndTimeByModel(job, sheet.getUuid(), finishedAt, finishedModel);

    if ("M3".equals(finishedModel)) {
        // M3 is the last stage, so the entire inference run is done.
        endAll(sheet, finishedAt);
    } else {
        // Chain into the following model (M1 -> M2, M2 -> M3).
        String next = nextModelType(finishedModel);
        startInference(sheet.getId(), sheet.getUuid(), next, resolveModelUuid(sheet, next));
    }
}
/**
 * Mark the whole inference run as finished once the final model completes.
 *
 * @param sheet the batch sheet being closed out
 * @param now   timestamp recorded as the inference end time
 */
private void endAll(InferenceBatchSheet sheet, ZonedDateTime now) {
    SaveInferenceAiDto done = new SaveInferenceAiDto();
    done.setUuid(sheet.getUuid());
    done.setStatus(Status.END.getId());
    done.setInferEndDttm(now);
    done.setType("M3"); // the run always ends on the last model, M3
    inferenceResultCoreService.update(done);
}
/**
 * Return the model type that runs after {@code current} in the fixed
 * M1 -> M2 -> M3 pipeline.
 *
 * @param current the model type that just finished
 * @return the next model type to run
 * @throws IllegalArgumentException if {@code current} is neither "M1" nor "M2"
 */
private String nextModelType(String current) {
    if ("M1".equals(current)) {
        return "M2";
    }
    if ("M2".equals(current)) {
        return "M3";
    }
    // M3 has no successor; anything else is a programming error.
    throw new IllegalArgumentException("Unknown runningModelType: " + current);
}
/**
 * Look up the model UUID configured on the sheet for the given model type.
 *
 * @param sheet batch sheet carrying the per-model UUIDs
 * @param type  model type ("M1", "M2" or "M3")
 * @return the UUID of the requested model
 * @throws IllegalArgumentException if {@code type} is not a known model type
 */
private UUID resolveModelUuid(InferenceBatchSheet sheet, String type) {
    if ("M1".equals(type)) {
        return sheet.getM1ModelUuid();
    } else if ("M2".equals(type)) {
        return sheet.getM2ModelUuid();
    } else if ("M3".equals(type)) {
        return sheet.getM3ModelUuid();
    }
    throw new IllegalArgumentException("Unknown type: " + type);
}
/**
 * Persist an in-progress snapshot (status plus job counters) for the running batch.
 *
 * @param sheet the batch sheet being polled
 * @param job   current job status from the inference API
 */
private void onProcessing(InferenceBatchSheet sheet, JobStatusDto job) {
    SaveInferenceAiDto snapshot = new SaveInferenceAiDto();
    snapshot.setUuid(sheet.getUuid());
    snapshot.setStatus(Status.IN_PROGRESS.getId());
    snapshot.setPendingJobs(job.getPendingJobs());
    snapshot.setRunningJobs(job.getRunningJobs());
    snapshot.setCompletedJobs(job.getCompletedJobs());
    snapshot.setFailedJobs(job.getFailedJobs());
    inferenceResultCoreService.update(snapshot);
}
/**
* 다음 모델 추론 실행
*
* @param id
* @param uuid
* @param type
* @param modelUuid
*/
private void startInference(Long id, UUID uuid, String type, UUID modelUuid) {
// 추론 실행 api 파라미터 조회
InferenceProgressDto progressDto =
inferenceResultCoreService.getInferenceAiResultById(id, modelUuid);
// ai 에 맞는 모델 명으로 변경
String inferenceType = modelToInferenceType(type);
InferenceSendDto.pred_requests_areas predRequestsAreas = new InferenceSendDto.pred_requests_areas();
predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year()); predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year()); predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year());
predRequestsAreas.setInput1_scene_path( predRequestsAreas.setInput1_scene_path(progressDto.getPred_requests_areas().getInput1_scene_path());
progressDto.getPred_requests_areas().getInput1_scene_path()); predRequestsAreas.setInput2_scene_path(progressDto.getPred_requests_areas().getInput2_scene_path());
predRequestsAreas.setInput2_scene_path(
progressDto.getPred_requests_areas().getInput2_scene_path());
InferenceSendDto m = new InferenceSendDto(); InferenceSendDto m = new InferenceSendDto();
m.setPred_requests_areas(predRequestsAreas); m.setPred_requests_areas(predRequestsAreas);
m.setModel_version(progressDto.getModelVersion()); m.setModel_version(progressDto.getModelVersion());
m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName()); m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName());
m.setCd_model_config( m.setCd_model_config(progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName()); m.setCls_model_path(progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
m.setCls_model_path(
progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
m.setCls_model_version(progressDto.getClsModelVersion()); m.setCls_model_version(progressDto.getClsModelVersion());
m.setCd_model_type(inferenceType); m.setCd_model_type(inferenceType);
m.setPriority(progressDto.getPriority()); m.setPriority(progressDto.getPriority());
// 추론 다음모델 실행 // 추론 실행 api 호출
Long batchId = ensureAccepted(m); Long batchId = ensureAccepted(m);
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto(); SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
@@ -191,71 +300,111 @@ public class MapSheetInferenceJobService {
inferenceResultCoreService.update(saveInferenceAiDto); inferenceResultCoreService.update(saveInferenceAiDto);
} }
/**
 * Translate an internal model type to the model name the AI service expects
 * (M1 -> G1, M2 -> G2, M3 -> G3).
 *
 * @param type internal model type
 * @return the corresponding AI-side model type
 * @throws IllegalArgumentException if {@code type} is not M1, M2 or M3
 */
private String modelToInferenceType(String type) {
    if ("M1".equals(type) || "M2".equals(type) || "M3".equals(type)) {
        // Mapping is a straight M->G substitution keeping the digit.
        return "G" + type.charAt(1);
    }
    throw new IllegalArgumentException("Unknown type: " + type);
}
/**
* api 호출
*
* @param dto
* @return
*/
private Long ensureAccepted(InferenceSendDto dto) { private Long ensureAccepted(InferenceSendDto dto) {
log.info("dto null? {}", dto == null); if (dto == null) {
ObjectMapper om = new ObjectMapper(); log.warn("not InferenceSendDto dto");
try { throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
log.info("dto json={}", om.writeValueAsString(dto));
} catch (Exception e) {
log.error(e.getMessage());
} }
HttpHeaders headers = new HttpHeaders(); // 1) 요청 로그
headers.setContentType(MediaType.APPLICATION_JSON); try {
headers.setAccept(List.of(MediaType.APPLICATION_JSON)); log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
} catch (JsonProcessingException e) {
log.warn("Failed to serialize inference dto", e);
}
// TODO 추후 삭제 // 2) local 환경 임시 처리
if ("local".equals(profile)) { if ("local".equals(profile)) {
if (dto.getPred_requests_areas() == null) { if (dto.getPred_requests_areas() == null) {
dto.setPred_requests_areas(new InferenceSendDto.pred_requests_areas()); throw new IllegalStateException("pred_requests_areas is null");
} }
dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson"); dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson"); dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
} }
// 3) HTTP 호출
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
ExternalCallResult<String> result = ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class); externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
int status = result.statusCode(); if (result.statusCode() < 200 || result.statusCode() >= 300) {
String body = result.body(); log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
if (status < 200 || status >= 300) {
throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY); throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
} }
Long batchId = 0L; // 4) 응답 파싱
try { try {
List<Map<String, Object>> list = List<Map<String, Object>> list =
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {}); objectMapper.readValue(result.body(), new TypeReference<>() {
});
Integer batchIdInt = (Integer) list.get(0).get("batch_id");
batchId = batchIdInt.longValue();
if (list.isEmpty()) { if (list.isEmpty()) {
throw new IllegalStateException("Inference response is empty"); throw new IllegalStateException("Inference response is empty");
} }
} catch (Exception e) { Object batchIdObj = list.get(0).get("batch_id");
log.error(e.getMessage()); if (batchIdObj == null) {
} throw new IllegalStateException("batch_id not found in response");
}
return batchId; return Long.valueOf(batchIdObj.toString());
} catch (Exception e) {
log.error("Failed to parse inference response. body={}", result.body(), e);
throw new CustomApiException("INVALID_INFERENCE_RESPONSE", HttpStatus.BAD_GATEWAY);
}
} }
private void updateProcessingEndTimeByModel(UUID uuid, ZonedDateTime dateTime, String type) { /**
* 실행중인 profile
*
* @return
*/
private boolean isLocalProfile() {
return "local".equalsIgnoreCase(profile);
}
/**
* 모델별 추론 종료 update
*/
private void updateProcessingEndTimeByModel(JobStatusDto dto, UUID uuid, ZonedDateTime dateTime, String type) {
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto(); SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(uuid); saveInferenceAiDto.setUuid(uuid);
saveInferenceAiDto.setUpdateUid(0L); saveInferenceAiDto.setUpdateUid(0L);
saveInferenceAiDto.setModelEndDttm(dateTime); saveInferenceAiDto.setModelEndDttm(dateTime);
saveInferenceAiDto.setType(type); saveInferenceAiDto.setType(type);
saveInferenceAiDto.setPendingJobs(dto.getPendingJobs());
saveInferenceAiDto.setRunningJobs(dto.getRunningJobs());
saveInferenceAiDto.setCompletedJobs(dto.getCompletedJobs());
saveInferenceAiDto.setFailedJobs(dto.getFailedJobs());
inferenceResultCoreService.update(saveInferenceAiDto); inferenceResultCoreService.update(saveInferenceAiDto);
} }
/**
 * Resolve a textual batch status from the job counters.
 * NOTE(review): duplicates the completion rule in {@code isCompleted(...)} and looks like
 * a leftover of the pre-refactor polling path — confirm callers before removing.
 *
 * @param totalJobs     total number of jobs in the batch
 * @param completedJobs number of completed jobs
 * @param failedJobs    number of failed jobs
 * @return "COMPLETED" when every job is completed or failed, otherwise "PROCESSING"
 */
private String setStatus(int totalJobs, int completedJobs, int failedJobs) {
    return (completedJobs + failedJobs) >= totalJobs ? "COMPLETED" : "PROCESSING";
}
}