[KC-103] Fix the inference execution batch

2026-01-14 15:24:14 +09:00
parent 326591c4bd
commit 4d52e2a97a
5 changed files with 499 additions and 308 deletions

View File

@@ -23,7 +23,9 @@ import org.springframework.data.domain.Pageable;
public class InferenceResultDto {
/** List-retrieval DTO */
/**
* List-retrieval DTO
*/
@Getter
@Setter
@AllArgsConstructor
@@ -35,11 +37,15 @@ public class InferenceResultDto {
private String status;
private String mapSheetCnt;
private Long detectingCnt;
@JsonFormatDttm private ZonedDateTime startTime;
@JsonFormatDttm private ZonedDateTime endTime;
@JsonFormatDttm private ZonedDateTime elapsedTime;
@JsonFormatDttm
private ZonedDateTime startTime;
@JsonFormatDttm
private ZonedDateTime endTime;
@JsonFormatDttm
private ZonedDateTime elapsedTime;
private Boolean applyYn;
@JsonFormatDttm private ZonedDateTime applyDttm;
@JsonFormatDttm
private ZonedDateTime applyDttm;
@JsonProperty("statusName")
public String statusName() {
@@ -47,7 +53,9 @@ public class InferenceResultDto {
}
}
/** List-retrieval search condition DTO */
/**
* List-retrieval search condition DTO
*/
@Getter
@Setter
@NoArgsConstructor
@@ -69,7 +77,9 @@ public class InferenceResultDto {
}
}
/** Detection data option enum */
/**
* Detection data option enum
*/
@Getter
@AllArgsConstructor
public enum MapSheetScope implements EnumType {
@@ -90,7 +100,9 @@ public class InferenceResultDto {
}
}
/** Analysis-target map sheet enum */
/**
* Analysis-target map sheet enum
*/
@Getter
@AllArgsConstructor
public enum DetectOption implements EnumType {
@@ -138,7 +150,9 @@ public class InferenceResultDto {
}
}
/** Request payload for saving change-detection run info */
/**
* Request payload for saving change-detection run info
*/
@Getter
@Setter
@NoArgsConstructor
@@ -177,8 +191,8 @@ public class InferenceResultDto {
@Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
@NotBlank
@EnumValid(
enumClass = DetectOption.class,
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
enumClass = DetectOption.class,
message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
private String detectOption;
@Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
@@ -205,41 +219,49 @@ public class InferenceResultDto {
private Integer targetYyyy;
private String detectOption;
private String mapSheetScope;
@JsonFormatDttm private ZonedDateTime inferStartDttm;
@JsonFormatDttm private ZonedDateTime inferEndDttm;
@JsonFormatDttm
private ZonedDateTime inferStartDttm;
@JsonFormatDttm
private ZonedDateTime inferEndDttm;
private Long detectingCnt = 0L;
private Long detectingEndCnt = 0L;
@JsonFormatDttm private ZonedDateTime m1ModelStartDttm;
@JsonFormatDttm private ZonedDateTime m1ModelEndDttm;
@JsonFormatDttm private ZonedDateTime m2ModelStartDttm;
@JsonFormatDttm private ZonedDateTime m2ModelEndDttm;
@JsonFormatDttm private ZonedDateTime m3ModelStartDttm;
@JsonFormatDttm private ZonedDateTime m3ModelEndDttm;
@JsonFormatDttm
private ZonedDateTime m1ModelStartDttm;
@JsonFormatDttm
private ZonedDateTime m1ModelEndDttm;
@JsonFormatDttm
private ZonedDateTime m2ModelStartDttm;
@JsonFormatDttm
private ZonedDateTime m2ModelEndDttm;
@JsonFormatDttm
private ZonedDateTime m3ModelStartDttm;
@JsonFormatDttm
private ZonedDateTime m3ModelEndDttm;
private String model1Ver;
private String model2Ver;
private String model3Ver;
public InferenceStatusDetailDto(
String title,
Integer compareYyyy,
Integer targetYyyy,
String detectOption,
String mapSheetScope,
ZonedDateTime inferStartDttm,
ZonedDateTime inferEndDttm,
Long detectingCnt,
Long detectingEndCnt,
ZonedDateTime m1ModelStartDttm,
ZonedDateTime m1ModelEndDttm,
ZonedDateTime m2ModelStartDttm,
ZonedDateTime m2ModelEndDttm,
ZonedDateTime m3ModelStartDttm,
ZonedDateTime m3ModelEndDttm,
String model1Ver,
String model2Ver,
String model3Ver) {
String title,
Integer compareYyyy,
Integer targetYyyy,
String detectOption,
String mapSheetScope,
ZonedDateTime inferStartDttm,
ZonedDateTime inferEndDttm,
Long detectingCnt,
Long detectingEndCnt,
ZonedDateTime m1ModelStartDttm,
ZonedDateTime m1ModelEndDttm,
ZonedDateTime m2ModelStartDttm,
ZonedDateTime m2ModelEndDttm,
ZonedDateTime m3ModelStartDttm,
ZonedDateTime m3ModelEndDttm,
String model1Ver,
String model2Ver,
String model3Ver) {
this.title = title;
this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy;
@@ -342,9 +364,12 @@ public class InferenceResultDto {
public static class InferenceServerStatusDto {
private String serverName;
@JsonIgnore private float cpu_user;
@JsonIgnore private float cpu_system;
@JsonIgnore private float memused;
@JsonIgnore
private float cpu_user;
@JsonIgnore
private float cpu_system;
@JsonIgnore
private float memused;
private Long kbmemused;
private float gpuUtil;
@@ -458,5 +483,9 @@ public class InferenceResultDto {
private ZonedDateTime modelEndDttm;
private Long updateUid;
private String runningModelType;
private Integer pendingJobs;
private Integer runningJobs;
private Integer completedJobs;
private Integer failedJobs;
}
}
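The @JsonFormatDttm annotation moved onto its own line throughout this file is a project-specific composed annotation; its definition is not part of this commit. For orientation, a composed Jackson date-time annotation of this kind is usually built on @JacksonAnnotationsInside, roughly as in the sketch below (the format pattern is an assumption, not taken from the project source):

// Hypothetical sketch of a composed annotation like @JsonFormatDttm.
// The format pattern is an assumption; the real definition is not in this diff.
import com.fasterxml.jackson.annotation.JacksonAnnotationsInside;
import com.fasterxml.jackson.annotation.JsonFormat;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD, ElementType.METHOD})
@JacksonAnnotationsInside
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss")
public @interface JsonFormatDttm {}

Either annotation placement (same line or its own line) is equivalent to Jackson; the change here is formatting only.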

View File

@@ -106,7 +106,7 @@ public class InferenceResultService {
// Look up comparison years eligible for a change-detection run
List<MngListCompareDto> compareList =
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
if (compareList.isEmpty()) {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
@@ -140,6 +140,7 @@ public class InferenceResultService {
throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
}
// Record the imagery-file year to use and set whether it is included in inference
for (MngListDto target : targetList) {
for (Map<String, Object> map : totalNumList) {
if (target.getMapSheetNum().equals(map.get("mapSheetNum").toString())) {
@@ -154,20 +155,20 @@ public class InferenceResultService {
// Generate the geojson files needed for inference
List<String> mapSheetNumList =
targetList.stream()
.filter(t -> Boolean.TRUE.equals(t.getIsSuccess()))
.map(MngListDto::getMapSheetNum)
.toList();
targetList.stream()
.filter(t -> Boolean.TRUE.equals(t.getIsSuccess()))
.map(MngListDto::getMapSheetNum)
.toList();
// Generate the comparison-year geojson file and get its path
String modelComparePath =
getSceneInference(
String.valueOf(req.getCompareYyyy()), mapSheetNumList, req.getMapSheetScope());
getSceneInference(
String.valueOf(req.getCompareYyyy()), mapSheetNumList, req.getMapSheetScope());
// Generate the target-year geojson file and get its path
String modelTargetPath =
getSceneInference(
String.valueOf(req.getTargetYyyy()), mapSheetNumList, req.getMapSheetScope());
getSceneInference(
String.valueOf(req.getTargetYyyy()), mapSheetNumList, req.getMapSheetScope());
// Build the parameters to send to the AI server
pred_requests_areas predRequestsAreas = new pred_requests_areas();
@@ -199,19 +200,19 @@ public class InferenceResultService {
// Look up the comparison year per detection data option and apply it to the request
private List<MapSheetNumDto> createdMngDto(
InferenceResultDto.RegReq req, List<MngListDto> targetList) {
InferenceResultDto.RegReq req, List<MngListDto> targetList) {
List<String> mapTargetIds = new ArrayList<>();
targetList.forEach(
hstMapSheet -> {
// Comparison years are fetched relative to the target year; build the parameters
mapTargetIds.add(hstMapSheet.getMapSheetNum());
});
hstMapSheet -> {
// Comparison years are fetched relative to the target year; build the parameters
mapTargetIds.add(hstMapSheet.getMapSheetNum());
});
// Look up comparison years
List<String> mapCompareIds = new ArrayList<>();
List<MngListCompareDto> compareList =
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
for (MngListCompareDto dto : compareList) {
// For the 'exclude from inference' option, skip entries with no previous-year file
@@ -226,35 +227,35 @@ public class InferenceResultService {
}
Set<String> compareSet =
mapCompareIds.stream()
.filter(Objects::nonNull)
.map(String::trim) // guard against stray whitespace/newlines
.collect(Collectors.toSet());
mapCompareIds.stream()
.filter(Objects::nonNull)
.map(String::trim) // guard against stray whitespace/newlines
.collect(Collectors.toSet());
// Intersect with the target set and keep only entries present in both
List<String> commonIds =
mapTargetIds.stream()
.filter(Objects::nonNull)
.map(String::trim)
.filter(compareSet::contains)
.toList();
mapTargetIds.stream()
.filter(Objects::nonNull)
.map(String::trim)
.filter(compareSet::contains)
.toList();
Set<String> commonIdSet =
commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet());
commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet());
// Rebuild the parameters for saving
List<MapSheetNumDto> mapSheetNum =
targetList.stream()
.filter(dto -> dto.getMapSheetNum() != null)
.filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
.map(
dto -> {
MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
return mapSheetNumDto;
})
.toList();
targetList.stream()
.filter(dto -> dto.getMapSheetNum() != null)
.filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
.map(
dto -> {
MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
return mapSheetNumDto;
})
.toList();
return mapSheetNum;
}
@@ -271,14 +272,14 @@ public class InferenceResultService {
throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
}
// 1) Request log (debug level recommended)
// 1) Request log
try {
log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
} catch (JsonProcessingException e) {
log.warn("Failed to serialize inference dto", e);
}
// 2) Temporary handling for the local profile (NPE guard)
// 2) Temporary handling for the local profile
if ("local".equals(profile)) {
if (dto.getPred_requests_areas() == null) {
throw new IllegalStateException("pred_requests_areas is null");
@@ -293,7 +294,7 @@ public class InferenceResultService {
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
if (result.statusCode() < 200 || result.statusCode() >= 300) {
log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
@@ -303,7 +304,8 @@ public class InferenceResultService {
// 4) Parse the response
try {
List<Map<String, Object>> list =
objectMapper.readValue(result.body(), new TypeReference<>() {});
objectMapper.readValue(result.body(), new TypeReference<>() {
});
if (list.isEmpty()) {
throw new IllegalStateException("Inference response is empty");
@@ -372,8 +374,8 @@ public class InferenceResultService {
/**
* Generate the geojson file
*
* @param yyyy year of each managed imagery file
* @param mapSheetNums list of 5k map sheet numbers
* @param yyyy year of each managed imagery file
* @param mapSheetNums list of 5k map sheet numbers
* @param mapSheetScope EXCL: exclude from inference, PREV: use previous-year map sheets
* @return
*/
@@ -408,7 +410,7 @@ public class InferenceResultService {
* @return
*/
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return inferenceResultCoreService.getInferenceResultGeomList(id, searchGeoReq);
}
@@ -419,7 +421,7 @@ public class InferenceResultService {
* @return
*/
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) {
@NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) {
return inferenceResultCoreService.listInferenceResultWithGeom(id, searchReq);
}
@@ -464,7 +466,7 @@ public class InferenceResultService {
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
List<InferenceServerStatusDto> servers =
inferenceResultCoreService.getInferenceServerStatusList();
inferenceResultCoreService.getInferenceServerStatusList();
String serverNames = "";
for (InferenceServerStatusDto server : servers) {
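All outbound calls in this service go through externalHttpClient.call(...) and inspect result.statusCode() and result.body() by hand. ExternalCallResult is defined outside this diff; a minimal record consistent with the usage shown here would be the following sketch (the real class may well carry more fields):

// Minimal sketch inferred from usage in this file; not the actual definition.
public record ExternalCallResult<T>(int statusCode, T body) {}

Modeling the status as a plain int is what keeps the recurring guard (statusCode() < 200 || statusCode() >= 300) a simple range check with no dependency on Spring's HttpStatus.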

View File

@@ -29,6 +29,7 @@ import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.function.Consumer;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.dao.DataAccessException;
@@ -101,7 +102,7 @@ public class InferenceResultCoreService {
mapSheetLearnEntity.setMapSheetCnt(mapSheetName);
mapSheetLearnEntity.setDetectingCnt((long) detectingCnt);
mapSheetLearnEntity.setStage(
mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy()));
mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy()));
// Save to the learn table
MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity);
@@ -163,9 +164,9 @@ public class InferenceResultCoreService {
*/
public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
InferenceDetailDto.AnalResSummary summary =
mapSheetAnalDataRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
mapSheetAnalDataRepository
.getInferenceResultSummary(id)
.orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
return summary;
}
@@ -186,7 +187,7 @@ public class InferenceResultCoreService {
* @return
*/
public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
}
@@ -198,16 +199,16 @@ public class InferenceResultCoreService {
*/
@Transactional(readOnly = true)
public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
@NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
@NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
// Fetch the dataIds for the analysis ID.
List<Long> dataIds =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getId)
.boxed()
.toList();
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getId)
.boxed()
.toList();
// Fetch the polygon data for those records
Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
}
@@ -224,108 +225,112 @@ public class InferenceResultCoreService {
@Transactional(readOnly = true)
public List<MapSheet> listGetScenes5k(Long analyId) {
List<String> sceneCodes =
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
.mapToObj(String::valueOf)
.toList();
mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
.mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
.mapToObj(String::valueOf)
.toList();
return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
.map(MapInkx5kEntity::toEntity)
.toList();
.map(MapInkx5kEntity::toEntity)
.toList();
}
/**
* Update the learn table
*
* @param request inference run info
*/
public void update(SaveInferenceAiDto request) {
MapSheetLearnEntity entity =
mapSheetLearnRepository
.getInferenceResultByUuid(request.getUuid())
.orElseThrow(() -> new EntityNotFoundException());
mapSheetLearnRepository.getInferenceResultByUuid(request.getUuid())
.orElseThrow(EntityNotFoundException::new);
// Update the M1/M2/M3 sections
if (request.getType() != null) {
switch (request.getType()) {
case "M1" -> {
if (request.getBatchId() != null) {
entity.setM1ModelBatchId(request.getBatchId());
}
if (request.getModelStartDttm() != null) {
entity.setM1ModelStartDttm(request.getModelStartDttm());
}
if (request.getModelEndDttm() != null) {
entity.setM1ModelEndDttm(request.getModelEndDttm());
}
}
case "M2" -> {
if (request.getBatchId() != null) {
entity.setM2ModelBatchId(request.getBatchId());
}
if (request.getModelStartDttm() != null) {
entity.setM2ModelStartDttm(request.getModelStartDttm());
}
if (request.getModelEndDttm() != null) {
entity.setM2ModelEndDttm(request.getModelEndDttm());
}
}
case "M3" -> {
if (request.getBatchId() != null) {
entity.setM3ModelBatchId(request.getBatchId());
}
if (request.getModelStartDttm() != null) {
entity.setM3ModelStartDttm(request.getModelStartDttm());
}
if (request.getModelEndDttm() != null) {
entity.setM3ModelEndDttm(request.getModelEndDttm());
}
}
}
applyModelUpdate(entity, request);
}
if (request.getRunningModelType() != null) {
entity.setRunningModelType(request.getRunningModelType());
}
if (request.getInferStartDttm() != null) {
entity.setInferStartDttm(request.getInferStartDttm());
}
if (request.getInferEndDttm() != null) {
entity.setInferEndDttm(request.getInferEndDttm());
}
if (request.getModelComparePath() != null) {
entity.setModelComparePath(request.getModelComparePath());
}
if (request.getModelTargetPath() != null) {
entity.setModelTargetPath(request.getModelTargetPath());
}
if (request.getDetectEndCnt() != null) {
entity.setDetectEndCnt(request.getDetectEndCnt());
}
if (request.getStatus() != null) {
entity.setStatus(request.getStatus());
}
if (request.getUpdateUid() != null) {
entity.setUpdatedUid(request.getUpdateUid());
}
// Update the common fields
applyIfNotNull(request.getRunningModelType(), entity::setRunningModelType);
applyIfNotNull(request.getInferStartDttm(), entity::setInferStartDttm);
applyIfNotNull(request.getInferEndDttm(), entity::setInferEndDttm);
applyIfNotNull(request.getModelComparePath(), entity::setModelComparePath);
applyIfNotNull(request.getModelTargetPath(), entity::setModelTargetPath);
applyIfNotNull(request.getDetectEndCnt(), entity::setDetectEndCnt);
applyIfNotNull(request.getStatus(), entity::setStatus);
applyIfNotNull(request.getUpdateUid(), entity::setUpdatedUid);
entity.setUpdatedDttm(ZonedDateTime.now());
}
private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) {
switch (request.getType()) {
case "M1" -> applyModelFields(
request,
entity::setM1ModelBatchId,
entity::setM1ModelStartDttm,
entity::setM1ModelEndDttm,
entity::setM1PendingJobs,
entity::setM1RunningJobs,
entity::setM1CompletedJobs,
entity::setM1FailedJobs
);
case "M2" -> applyModelFields(
request,
entity::setM2ModelBatchId,
entity::setM2ModelStartDttm,
entity::setM2ModelEndDttm,
entity::setM2PendingJobs,
entity::setM2RunningJobs,
entity::setM2CompletedJobs,
entity::setM2FailedJobs
);
case "M3" -> applyModelFields(
request,
entity::setM3ModelBatchId,
entity::setM3ModelStartDttm,
entity::setM3ModelEndDttm,
entity::setM3PendingJobs,
entity::setM3RunningJobs,
entity::setM3CompletedJobs,
entity::setM3FailedJobs
);
default -> throw new IllegalArgumentException("Unknown type: " + request.getType());
}
}
private void applyModelFields(
SaveInferenceAiDto request,
Consumer<Long> setBatchId,
Consumer<ZonedDateTime> setStart,
Consumer<ZonedDateTime> setEnd,
Consumer<Integer> setPending,
Consumer<Integer> setRunning,
Consumer<Integer> setCompleted,
Consumer<Integer> setFailed
) {
applyIfNotNull(request.getBatchId(), setBatchId);
applyIfNotNull(request.getModelStartDttm(), setStart);
applyIfNotNull(request.getModelEndDttm(), setEnd);
applyIfNotNull(request.getPendingJobs(), setPending);
applyIfNotNull(request.getRunningJobs(), setRunning);
applyIfNotNull(request.getCompletedJobs(), setCompleted);
applyIfNotNull(request.getFailedJobs(), setFailed);
}
private <T> void applyIfNotNull(T value, Consumer<T> setter) {
if (value != null) {
setter.accept(value);
}
}
public List<InferenceServerStatusDto> getInferenceServerStatusList() {
return mapSheetLearnRepository.getInferenceServerStatusList();
}
public InferenceBatchSheet getInferenceResultByStatus(String status) {
MapSheetLearnEntity entity =
mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
if (entity == null) {
return null;
@@ -345,7 +350,14 @@ public class InferenceResultCoreService {
return inferenceBatchSheet;
}
public InferenceProgressDto getInferenceAiResultById(Long id, String type, UUID modelUuid) {
/**
* Retrieve the inference-run API parameters
*
* @param id
* @param modelUuid
* @return
*/
public InferenceProgressDto getInferenceAiResultById(Long id, UUID modelUuid) {
return mapSheetLearnRepository.getInferenceAiResultById(id, modelUuid);
}
@@ -364,7 +376,7 @@ public class InferenceResultCoreService {
/**
* @param compareYear comparison year
* @param targetYear target year
* @param targetYear target year
* @return
*/
public Integer getLearnStage(Integer compareYear, Integer targetYear) {

View File

@@ -34,7 +34,6 @@ import com.querydsl.core.types.dsl.CaseBuilder;
import com.querydsl.core.types.dsl.Expressions;
import com.querydsl.core.types.dsl.NumberExpression;
import com.querydsl.core.types.dsl.StringExpression;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.impl.JPAQueryFactory;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityNotFoundException;

View File

@@ -11,7 +11,6 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceProgressDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto.pred_requests_areas;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import jakarta.transaction.Transactional;
@@ -48,137 +47,247 @@ public class MapSheetInferenceJobService {
@Value("${inference.url}")
private String inferenceUrl;
/** Inference progress batch, every 1 minute */
/**
* Inference progress batch, every 1 minute
*/
@Scheduled(fixedDelay = 60_000)
@Transactional
public void runBatch() {
if ("local".equalsIgnoreCase(profile)) {
if (isLocalProfile()) {
return;
}
try {
InferenceBatchSheet batchSheet =
inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
if (batchSheet == null) {
// Look up the in-progress batch
InferenceBatchSheet sheet = findInProgressSheet();
if (sheet == null) {
return;
}
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
Long batchId = 0L;
if (batchSheet.getM3BatchId() != null) {
batchId = batchSheet.getM3BatchId();
} else if (batchSheet.getM2BatchId() != null) {
batchId = batchSheet.getM2BatchId();
} else if (batchSheet.getM1BatchId() != null) {
batchId = batchSheet.getM1BatchId();
}
if (batchId == 0L) {
// Get the batch ID
Long batchId = resolveBatchId(sheet);
if (batchId == null || batchId == 0L) {
return;
}
String url = batchUrl + "/" + batchId;
ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
int status = result.statusCode();
if (status < 200 || status >= 300) {
// Fetch the inference run status
JobStatusDto job = fetchJobStatus(batchId);
if (job == null) {
return;
}
String json = result.body();
JobStatusDto dto = objectMapper.readValue(json, JobStatusDto.class);
int totalJobs = dto.getTotalJobs();
int completedJobs = dto.getCompletedJobs();
int failedJobs = dto.getFailedJobs();
// Completed when successes plus failures add up to the total
String inferStatus = setStatus(totalJobs, completedJobs, failedJobs);
if ("COMPLETED".equals(inferStatus)) {
String type = batchSheet.getRunningModelType();
if (type.equals("M1")) {
// If M1 finished, start M2
startInference(
batchSheet.getId(), batchSheet.getUuid(), "M2", batchSheet.getM2ModelUuid());
// End time
updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M1");
} else if (type.equals("M2")) {
// If M2 finished, start M3
startInference(
batchSheet.getId(), batchSheet.getUuid(), "M3", batchSheet.getM3ModelUuid());
// End time
updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M2");
} else if (type.equals("M3")) {
// Finished
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(batchSheet.getUuid());
saveInferenceAiDto.setStatus(Status.END.getId());
saveInferenceAiDto.setInferEndDttm(ZonedDateTime.now());
saveInferenceAiDto.setType(type);
inferenceResultCoreService.update(saveInferenceAiDto);
// End time
updateProcessingEndTimeByModel(batchSheet.getUuid(), ZonedDateTime.now(), "M3");
}
if (isCompleted(job)) {
// Completion handling
onCompleted(sheet, job);
} else {
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(batchSheet.getUuid());
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
saveInferenceAiDto.setDetectEndCnt((long) (completedJobs + failedJobs));
inferenceResultCoreService.update(saveInferenceAiDto);
// In-progress handling
onProcessing(sheet, job);
}
} catch (JsonProcessingException e) {
Thread.currentThread().interrupt();
log.error("배치 중 인터럽트 발생", e);
// A JSON parsing error is not an interrupt case
log.error("배치 중 JSON 파싱 오류", e);
} catch (Exception e) {
log.error("배치 처리 중 예외", e);
}
}
private void startInference(Long id, UUID uuid, String type, UUID modelUuid) {
/**
* Look up the in-progress batch
*
* @return
*/
private InferenceBatchSheet findInProgressSheet() {
return inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
}
InferenceProgressDto progressDto =
inferenceResultCoreService.getInferenceAiResultById(id, type, modelUuid);
/**
* Determine the batchId
*
* @param sheet
* @return
*/
private Long resolveBatchId(InferenceBatchSheet sheet) {
// M3 > M2 > M1
if (sheet.getM3BatchId() != null) {
return sheet.getM3BatchId();
}
if (sheet.getM2BatchId() != null) {
return sheet.getM2BatchId();
}
if (sheet.getM1BatchId() != null) {
return sheet.getM1BatchId();
}
return 0L;
}
String inferenceType = "";
/**
* Fetch the inference run status
*
* @param batchId
* @return
* @throws JsonProcessingException
*/
private JobStatusDto fetchJobStatus(Long batchId) throws JsonProcessingException {
String url = batchUrl + "/" + batchId;
if (type.equals("M1")) {
inferenceType = "G1";
} else if (type.equals("M2")) {
inferenceType = "G2";
} else if (type.equals("M3")) {
inferenceType = "G3";
ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.GET, null, jsonHeaders(), String.class);
int status = result.statusCode();
if (status < 200 || status >= 300) {
return null;
}
pred_requests_areas predRequestsAreas = new pred_requests_areas();
return objectMapper.readValue(result.body(), JobStatusDto.class);
}
private HttpHeaders jsonHeaders() {
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
return headers;
}
/**
* Completion check
*
* @param dto
* @return
*/
private boolean isCompleted(JobStatusDto dto) {
return dto.getTotalJobs() <= (dto.getCompletedJobs() + dto.getFailedJobs());
}
/**
* Completion handling
*
* @param sheet
* @param job
*/
private void onCompleted(InferenceBatchSheet sheet, JobStatusDto job) {
String currentType = sheet.getRunningModelType();
ZonedDateTime now = ZonedDateTime.now();
// Update the current model's end time
updateProcessingEndTimeByModel(job, sheet.getUuid(), now, currentType);
// If M3, the whole run is finished
if ("M3".equals(currentType)) {
endAll(sheet, now);
return;
}
// Run the next model (M1->M2, M2->M3)
String nextType = nextModelType(currentType);
UUID nextModelUuid = resolveModelUuid(sheet, nextType);
// Start inference for the next model
startInference(sheet.getId(), sheet.getUuid(), nextType, nextModelUuid);
}
/**
* Update when inference ends
*
* @param sheet
* @param now
*/
private void endAll(InferenceBatchSheet sheet, ZonedDateTime now) {
SaveInferenceAiDto save = new SaveInferenceAiDto();
save.setUuid(sheet.getUuid());
save.setStatus(Status.END.getId());
save.setInferEndDttm(now);
save.setType("M3"); // 마지막 모델 기준
inferenceResultCoreService.update(save);
}
/**
* Determine the next model type to run
*
* @param currentType
* @return
*/
private String nextModelType(String currentType) {
if ("M1".equals(currentType)) {
return "M2";
}
if ("M2".equals(currentType)) {
return "M3";
}
throw new IllegalArgumentException("Unknown runningModelType: " + currentType);
}
/**
* Get the model's UUID
*
* @param sheet
* @param type
* @return
*/
private UUID resolveModelUuid(InferenceBatchSheet sheet, String type) {
if ("M1".equals(type)) {
return sheet.getM1ModelUuid();
}
if ("M2".equals(type)) {
return sheet.getM2ModelUuid();
}
if ("M3".equals(type)) {
return sheet.getM3ModelUuid();
}
throw new IllegalArgumentException("Unknown type: " + type);
}
/**
* In-progress handling
*
* @param sheet
* @param job
*/
private void onProcessing(InferenceBatchSheet sheet, JobStatusDto job) {
SaveInferenceAiDto save = new SaveInferenceAiDto();
save.setUuid(sheet.getUuid());
save.setStatus(Status.IN_PROGRESS.getId());
save.setPendingJobs(job.getPendingJobs());
save.setRunningJobs(job.getRunningJobs());
save.setCompletedJobs(job.getCompletedJobs());
save.setFailedJobs(job.getFailedJobs());
inferenceResultCoreService.update(save);
}
/**
* Start inference for the next model
*
* @param id
* @param uuid
* @param type
* @param modelUuid
*/
private void startInference(Long id, UUID uuid, String type, UUID modelUuid) {
// Retrieve the inference-run API parameters
InferenceProgressDto progressDto =
inferenceResultCoreService.getInferenceAiResultById(id, modelUuid);
// Map to the model name the AI server expects
String inferenceType = modelToInferenceType(type);
InferenceSendDto.pred_requests_areas predRequestsAreas = new InferenceSendDto.pred_requests_areas();
predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year());
predRequestsAreas.setInput1_scene_path(
progressDto.getPred_requests_areas().getInput1_scene_path());
predRequestsAreas.setInput2_scene_path(
progressDto.getPred_requests_areas().getInput2_scene_path());
predRequestsAreas.setInput1_scene_path(progressDto.getPred_requests_areas().getInput1_scene_path());
predRequestsAreas.setInput2_scene_path(progressDto.getPred_requests_areas().getInput2_scene_path());
InferenceSendDto m = new InferenceSendDto();
m.setPred_requests_areas(predRequestsAreas);
m.setModel_version(progressDto.getModelVersion());
m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName());
m.setCd_model_config(
progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
m.setCls_model_path(
progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
m.setCd_model_config(progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
m.setCls_model_path(progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
m.setCls_model_version(progressDto.getClsModelVersion());
m.setCd_model_type(inferenceType);
m.setPriority(progressDto.getPriority());
// Run the next inference model
// Call the inference-run API
Long batchId = ensureAccepted(m);
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
@@ -191,71 +300,111 @@ public class MapSheetInferenceJobService {
inferenceResultCoreService.update(saveInferenceAiDto);
}
/**
* Map to the model name the AI server expects
*
* @param type model type
* @return String
*/
private String modelToInferenceType(String type) {
if ("M1".equals(type)) {
return "G1";
}
if ("M2".equals(type)) {
return "G2";
}
if ("M3".equals(type)) {
return "G3";
}
throw new IllegalArgumentException("Unknown type: " + type);
}
/**
* Call the API
*
* @param dto
* @return
*/
private Long ensureAccepted(InferenceSendDto dto) {
log.info("dto null? {}", dto == null);
ObjectMapper om = new ObjectMapper();
try {
log.info("dto json={}", om.writeValueAsString(dto));
} catch (Exception e) {
log.error(e.getMessage());
if (dto == null) {
log.warn("not InferenceSendDto dto");
throw new CustomApiException("BAD_REQUEST", HttpStatus.BAD_REQUEST);
}
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
// 1) Request log
try {
log.debug("Inference request dto={}", objectMapper.writeValueAsString(dto));
} catch (JsonProcessingException e) {
log.warn("Failed to serialize inference dto", e);
}
// TODO: remove later
// 2) Temporary handling for the local profile
if ("local".equals(profile)) {
if (dto.getPred_requests_areas() == null) {
dto.setPred_requests_areas(new InferenceSendDto.pred_requests_areas());
throw new IllegalStateException("pred_requests_areas is null");
}
dto.getPred_requests_areas().setInput1_scene_path("/kamco-nfs/requests/2023_local.geojson");
dto.getPred_requests_areas().setInput2_scene_path("/kamco-nfs/requests/2024_local.geojson");
}
// 3) HTTP call
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
ExternalCallResult<String> result =
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
int status = result.statusCode();
String body = result.body();
if (status < 200 || status >= 300) {
if (result.statusCode() < 200 || result.statusCode() >= 300) {
log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
}
Long batchId = 0L;
// 4) Parse the response
try {
List<Map<String, Object>> list =
om.readValue(body, new TypeReference<List<Map<String, Object>>>() {});
objectMapper.readValue(result.body(), new TypeReference<>() {
});
Integer batchIdInt = (Integer) list.get(0).get("batch_id");
batchId = batchIdInt.longValue();
if (list.isEmpty()) {
throw new IllegalStateException("Inference response is empty");
}
} catch (Exception e) {
log.error(e.getMessage());
}
Object batchIdObj = list.get(0).get("batch_id");
if (batchIdObj == null) {
throw new IllegalStateException("batch_id not found in response");
}
return batchId;
return Long.valueOf(batchIdObj.toString());
} catch (Exception e) {
log.error("Failed to parse inference response. body={}", result.body(), e);
throw new CustomApiException("INVALID_INFERENCE_RESPONSE", HttpStatus.BAD_GATEWAY);
}
}
private void updateProcessingEndTimeByModel(UUID uuid, ZonedDateTime dateTime, String type) {
/**
* Whether the active profile is local
*
* @return
*/
private boolean isLocalProfile() {
return "local".equalsIgnoreCase(profile);
}
/**
* Per-model update when inference ends
*/
private void updateProcessingEndTimeByModel(JobStatusDto dto, UUID uuid, ZonedDateTime dateTime, String type) {
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(uuid);
saveInferenceAiDto.setUpdateUid(0L);
saveInferenceAiDto.setModelEndDttm(dateTime);
saveInferenceAiDto.setType(type);
saveInferenceAiDto.setPendingJobs(dto.getPendingJobs());
saveInferenceAiDto.setRunningJobs(dto.getRunningJobs());
saveInferenceAiDto.setCompletedJobs(dto.getCompletedJobs());
saveInferenceAiDto.setFailedJobs(dto.getFailedJobs());
inferenceResultCoreService.update(saveInferenceAiDto);
}
private String setStatus(int totalJobs, int completedJobs, int failedJobs) {
if (totalJobs <= (completedJobs + failedJobs)) {
return "COMPLETED";
}
return "PROCESSING";
}
}
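The completion rule driving the scheduler is unchanged in substance from the removed setStatus helper: a batch is finished once completed and failed jobs together account for every job. Restated on plain ints with worked values (illustration only; the service operates on JobStatusDto):

// Same predicate as isCompleted(JobStatusDto) above, restated on plain ints.
static boolean isBatchCompleted(int totalJobs, int completedJobs, int failedJobs) {
    // Jobs still pending or running equal total - (completed + failed).
    return totalJobs <= completedJobs + failedJobs;
}

// isBatchCompleted(10, 7, 3) -> true:  all ten jobs accounted for, the batch ends
// isBatchCompleted(10, 7, 2) -> false: one job is still pending or running

On true, onCompleted advances the chain M1 -> M2 -> M3 and finally marks the run END; on false, onProcessing persists the four job counters this commit adds to SaveInferenceAiDto and the learn entity.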