[KC-103] Apply Spotless

2026-01-14 15:24:45 +09:00
parent d417972a5a
commit 1ae69e5665
4 changed files with 161 additions and 186 deletions
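
This commit only reformats the four files below by running the Spotless formatter: multi-line Javadoc is collapsed onto single lines, field annotations such as @JsonFormatDttm and @JsonIgnore move onto the same line as the field, long statements are rewrapped, and switch-arrow bodies are re-indented. As a point of reference, a minimal Spotless setup that produces this kind of output might look like the sketch below; the Gradle Kotlin DSL, the plugin version, and the google-java-format choice are assumptions, since the project's actual build configuration is not part of this diff.

// build.gradle.kts — illustrative sketch only, not taken from this commit
plugins {
    id("com.diffplug.spotless") version "6.25.0" // assumed plugin version
}

spotless {
    java {
        target("src/**/*.java")   // format all Java sources
        googleJavaFormat()        // assumed formatter choice
        removeUnusedImports()
        trimTrailingWhitespace()
        endWithNewline()
    }
}

// ./gradlew spotlessApply rewrites the sources in place (the kind of change recorded here);
// ./gradlew spotlessCheck fails the build when files are not formatted.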

InferenceResultDto.java

@@ -23,9 +23,7 @@ import org.springframework.data.domain.Pageable;
 public class InferenceResultDto {
-/**
- * 목록조회 dto
- */
+/** 목록조회 dto */
 @Getter
 @Setter
 @AllArgsConstructor
@@ -37,15 +35,11 @@ public class InferenceResultDto {
 private String status;
 private String mapSheetCnt;
 private Long detectingCnt;
-@JsonFormatDttm
-private ZonedDateTime startTime;
-@JsonFormatDttm
-private ZonedDateTime endTime;
-@JsonFormatDttm
-private ZonedDateTime elapsedTime;
+@JsonFormatDttm private ZonedDateTime startTime;
+@JsonFormatDttm private ZonedDateTime endTime;
+@JsonFormatDttm private ZonedDateTime elapsedTime;
 private Boolean applyYn;
-@JsonFormatDttm
-private ZonedDateTime applyDttm;
+@JsonFormatDttm private ZonedDateTime applyDttm;
 @JsonProperty("statusName")
 public String statusName() {
@@ -53,9 +47,7 @@ public class InferenceResultDto {
 }
 }
-/**
- * 목록조회 검색 조건 dto
- */
+/** 목록조회 검색 조건 dto */
 @Getter
 @Setter
 @NoArgsConstructor
@@ -77,9 +69,7 @@ public class InferenceResultDto {
 }
 }
-/**
- * 탐지 데이터 옵션 dto
- */
+/** 탐지 데이터 옵션 dto */
 @Getter
 @AllArgsConstructor
 public enum MapSheetScope implements EnumType {
@@ -100,9 +90,7 @@ public class InferenceResultDto {
 }
 }
-/**
- * 분석대상 도엽 enum
- */
+/** 분석대상 도엽 enum */
 @Getter
 @AllArgsConstructor
 public enum DetectOption implements EnumType {
@@ -150,9 +138,7 @@ public class InferenceResultDto {
 }
 }
-/**
- * 변화탐지 실행 정보 저장 요청 정보
- */
+/** 변화탐지 실행 정보 저장 요청 정보 */
 @Getter
 @Setter
 @NoArgsConstructor
@@ -191,8 +177,8 @@ public class InferenceResultDto {
 @Schema(description = "탐지 데이터 옵션 - 추론제외(EXCL), 이전 년도 도엽 사용(PREV)", example = "EXCL")
 @NotBlank
 @EnumValid(
 enumClass = DetectOption.class,
 message = "탐지 데이터 옵션은 '추론제외', '이전 년도 도엽 사용' 만 사용 가능합니다.")
 private String detectOption;
 @Schema(description = "5k 도협 번호 목록", example = "[33605,33606, 33610, 34802, 35603, 35611]")
@@ -219,49 +205,41 @@ public class InferenceResultDto {
 private Integer targetYyyy;
 private String detectOption;
 private String mapSheetScope;
-@JsonFormatDttm
-private ZonedDateTime inferStartDttm;
-@JsonFormatDttm
-private ZonedDateTime inferEndDttm;
+@JsonFormatDttm private ZonedDateTime inferStartDttm;
+@JsonFormatDttm private ZonedDateTime inferEndDttm;
 private Long detectingCnt = 0L;
 private Long detectingEndCnt = 0L;
-@JsonFormatDttm
-private ZonedDateTime m1ModelStartDttm;
-@JsonFormatDttm
-private ZonedDateTime m1ModelEndDttm;
-@JsonFormatDttm
-private ZonedDateTime m2ModelStartDttm;
-@JsonFormatDttm
-private ZonedDateTime m2ModelEndDttm;
-@JsonFormatDttm
-private ZonedDateTime m3ModelStartDttm;
-@JsonFormatDttm
-private ZonedDateTime m3ModelEndDttm;
+@JsonFormatDttm private ZonedDateTime m1ModelStartDttm;
+@JsonFormatDttm private ZonedDateTime m1ModelEndDttm;
+@JsonFormatDttm private ZonedDateTime m2ModelStartDttm;
+@JsonFormatDttm private ZonedDateTime m2ModelEndDttm;
+@JsonFormatDttm private ZonedDateTime m3ModelStartDttm;
+@JsonFormatDttm private ZonedDateTime m3ModelEndDttm;
 private String model1Ver;
 private String model2Ver;
 private String model3Ver;
 public InferenceStatusDetailDto(
 String title,
 Integer compareYyyy,
 Integer targetYyyy,
 String detectOption,
 String mapSheetScope,
 ZonedDateTime inferStartDttm,
 ZonedDateTime inferEndDttm,
 Long detectingCnt,
 Long detectingEndCnt,
 ZonedDateTime m1ModelStartDttm,
 ZonedDateTime m1ModelEndDttm,
 ZonedDateTime m2ModelStartDttm,
 ZonedDateTime m2ModelEndDttm,
 ZonedDateTime m3ModelStartDttm,
 ZonedDateTime m3ModelEndDttm,
 String model1Ver,
 String model2Ver,
 String model3Ver) {
 this.title = title;
 this.compareYyyy = compareYyyy;
 this.targetYyyy = targetYyyy;
@@ -364,12 +342,9 @@ public class InferenceResultDto {
 public static class InferenceServerStatusDto {
 private String serverName;
-@JsonIgnore
-private float cpu_user;
-@JsonIgnore
-private float cpu_system;
-@JsonIgnore
-private float memused;
+@JsonIgnore private float cpu_user;
+@JsonIgnore private float cpu_system;
+@JsonIgnore private float memused;
 private Long kbmemused;
 private float gpuUtil;

InferenceResultService.java

@@ -107,7 +107,7 @@ public class InferenceResultService {
 // 변화탐지 실행 가능 비교년도 조회
 List<MngListCompareDto> compareList =
 mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
 if (compareList.isEmpty()) {
 throw new CustomApiException("NOT_FOUND_COMPARE_YEAR", HttpStatus.NOT_FOUND);
@@ -156,20 +156,20 @@ public class InferenceResultService {
 // 추론에 필요한 geojson 파일 생성
 List<String> mapSheetNumList =
 targetList.stream()
 .filter(t -> Boolean.TRUE.equals(t.getIsSuccess()))
 .map(MngListDto::getMapSheetNum)
 .toList();
 // 비교년도 geojson 파일 생성하여 경로 받기
 String modelComparePath =
 getSceneInference(
 String.valueOf(req.getCompareYyyy()), mapSheetNumList, req.getMapSheetScope());
 // 기준년도 geojson 파일 생성하여 경로 받기
 String modelTargetPath =
 getSceneInference(
 String.valueOf(req.getTargetYyyy()), mapSheetNumList, req.getMapSheetScope());
 // ai 서버에 전달할 파라미터 생성
 pred_requests_areas predRequestsAreas = new pred_requests_areas();
@@ -201,19 +201,19 @@ public class InferenceResultService {
 // 비교년도 탐지 제이터 옵션 별로 조회하여 req에 적용
 private List<MapSheetNumDto> createdMngDto(
 InferenceResultDto.RegReq req, List<MngListDto> targetList) {
 List<String> mapTargetIds = new ArrayList<>();
 targetList.forEach(
 hstMapSheet -> {
 // 비교년도는 target 년도 기준으로 가져옴 파라미터 만들기
 mapTargetIds.add(hstMapSheet.getMapSheetNum());
 });
 // 비교년도 조회
 List<String> mapCompareIds = new ArrayList<>();
 List<MngListCompareDto> compareList =
 mapSheetMngCoreService.getByHstMapSheetCompareList(req.getCompareYyyy(), mapTargetIds);
 for (MngListCompareDto dto : compareList) {
 // 추론 제외일때 이전년도 파일이 없으면 제외
@@ -228,35 +228,35 @@ public class InferenceResultService {
 }
 Set<String> compareSet =
 mapCompareIds.stream()
 .filter(Objects::nonNull)
 .map(String::trim) // 공백/개행 방지
 .collect(Collectors.toSet());
 // target 기준 compare 비교하여 서로 있는것만 저장
 List<String> commonIds =
 mapTargetIds.stream()
 .filter(Objects::nonNull)
 .map(String::trim)
 .filter(compareSet::contains)
 .toList();
 Set<String> commonIdSet =
 commonIds.stream().filter(Objects::nonNull).map(String::trim).collect(Collectors.toSet());
 // 저장하기위해 파라미터 다시 구성
 List<MapSheetNumDto> mapSheetNum =
 targetList.stream()
 .filter(dto -> dto.getMapSheetNum() != null)
 .filter(dto -> commonIdSet.contains(dto.getMapSheetNum().trim()))
 .map(
 dto -> {
 MapSheetNumDto mapSheetNumDto = new MapSheetNumDto();
 mapSheetNumDto.setMapSheetNum(dto.getMapSheetNum());
 mapSheetNumDto.setMapSheetName(dto.getMapSheetName());
 return mapSheetNumDto;
 })
 .toList();
 return mapSheetNum;
 }
@@ -295,7 +295,7 @@ public class InferenceResultService {
 headers.setAccept(List.of(MediaType.APPLICATION_JSON));
 ExternalCallResult<String> result =
 externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
 if (result.statusCode() < 200 || result.statusCode() >= 300) {
 log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
@@ -305,8 +305,7 @@ public class InferenceResultService {
 // 4) 응답 파싱
 try {
 List<Map<String, Object>> list =
-objectMapper.readValue(result.body(), new TypeReference<>() {
-});
+objectMapper.readValue(result.body(), new TypeReference<>() {});
 if (list.isEmpty()) {
 throw new IllegalStateException("Inference response is empty");
@@ -375,8 +374,8 @@ public class InferenceResultService {
 /**
 * geojson 파일 생성
 *
 * @param yyyy 영상관리 파일별 년도
 * @param mapSheetNums 5k 도엽 번호 리스트
 * @param mapSheetScope EXCL : 추론제외, PREV 이전 년도 도엽 사용
 * @return
 */
@@ -411,7 +410,7 @@ public class InferenceResultService {
 * @return
 */
 public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
 Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
 return inferenceResultCoreService.getInferenceResultGeomList(id, searchGeoReq);
 }
@@ -422,7 +421,7 @@ public class InferenceResultService {
 * @return
 */
 public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
 @NotNull Long id, InferenceDetailDto.SearchGeoReq searchReq) {
 return inferenceResultCoreService.listInferenceResultWithGeom(id, searchReq);
 }
@@ -467,7 +466,7 @@ public class InferenceResultService {
 public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
 List<InferenceServerStatusDto> servers =
 inferenceResultCoreService.getInferenceServerStatusList();
 String serverNames = "";
 for (InferenceServerStatusDto server : servers) {

InferenceResultCoreService.java

@@ -103,7 +103,7 @@ public class InferenceResultCoreService {
 mapSheetLearnEntity.setMapSheetCnt(mapSheetName);
 mapSheetLearnEntity.setDetectingCnt((long) detectingCnt);
 mapSheetLearnEntity.setStage(
 mapSheetLearnRepository.getLearnStage(req.getCompareYyyy(), req.getTargetYyyy()));
 // learn 테이블 저장
 MapSheetLearnEntity savedLearn = mapSheetLearnRepository.save(mapSheetLearnEntity);
@@ -165,9 +165,9 @@ public class InferenceResultCoreService {
 */
 public InferenceDetailDto.AnalResSummary getInferenceResultSummary(Long id) {
 InferenceDetailDto.AnalResSummary summary =
 mapSheetAnalDataRepository
 .getInferenceResultSummary(id)
 .orElseThrow(() -> new EntityNotFoundException("요약정보를 찾을 수 없습니다. " + id));
 return summary;
 }
@@ -188,7 +188,7 @@ public class InferenceResultCoreService {
 * @return
 */
 public Page<InferenceDetailDto.Geom> getInferenceResultGeomList(
 Long id, InferenceDetailDto.SearchGeoReq searchGeoReq) {
 return mapSheetAnalDataRepository.getInferenceGeomList(id, searchGeoReq);
 }
@@ -200,16 +200,16 @@ public class InferenceResultCoreService {
 */
 @Transactional(readOnly = true)
 public Page<InferenceDetailDto.DetailListEntity> listInferenceResultWithGeom(
 @NotNull Long analyId, InferenceDetailDto.SearchGeoReq searchReq) {
 // 분석 ID 에 해당하는 dataids를 가져온다.
 List<Long> dataIds =
 mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
 .mapToLong(MapSheetAnalDataInferenceEntity::getId)
 .boxed()
 .toList();
 // 해당데이터의 폴리곤데이터를 가져온다
 Page<MapSheetAnalDataInferenceGeomEntity> mapSheetAnalDataGeomEntities =
 mapSheetAnalDataRepository.listInferenceResultWithGeom(dataIds, searchReq);
 return mapSheetAnalDataGeomEntities.map(MapSheetAnalDataInferenceGeomEntity::toEntity);
 }
@@ -226,14 +226,14 @@ public class InferenceResultCoreService {
 @Transactional(readOnly = true)
 public List<MapSheet> listGetScenes5k(Long analyId) {
 List<String> sceneCodes =
 mapSheetAnalDataRepository.listAnalyGeom(analyId).stream()
 .mapToLong(MapSheetAnalDataInferenceEntity::getMapSheetNum)
 .mapToObj(String::valueOf)
 .toList();
 return mapInkx5kRepository.listGetScenes5k(sceneCodes).stream()
 .map(MapInkx5kEntity::toEntity)
 .toList();
 }
 /**
@@ -243,8 +243,9 @@ public class InferenceResultCoreService {
 */
 public void update(SaveInferenceAiDto request) {
 MapSheetLearnEntity entity =
-mapSheetLearnRepository.getInferenceResultByUuid(request.getUuid())
-.orElseThrow(EntityNotFoundException::new);
+mapSheetLearnRepository
+.getInferenceResultByUuid(request.getUuid())
+.orElseThrow(EntityNotFoundException::new);
 // M1/M2/M3 영역 업데이트
 if (request.getType() != null) {
@@ -266,50 +267,49 @@ public class InferenceResultCoreService {
 private void applyModelUpdate(MapSheetLearnEntity entity, SaveInferenceAiDto request) {
 switch (request.getType()) {
-case "M1" -> applyModelFields(
+case "M1" ->
+applyModelFields(
 request,
 entity::setM1ModelBatchId,
 entity::setM1ModelStartDttm,
 entity::setM1ModelEndDttm,
 entity::setM1PendingJobs,
 entity::setM1RunningJobs,
 entity::setM1CompletedJobs,
-entity::setM1FailedJobs
-);
+entity::setM1FailedJobs);
-case "M2" -> applyModelFields(
+case "M2" ->
+applyModelFields(
 request,
 entity::setM2ModelBatchId,
 entity::setM2ModelStartDttm,
 entity::setM2ModelEndDttm,
 entity::setM2PendingJobs,
 entity::setM2RunningJobs,
 entity::setM2CompletedJobs,
-entity::setM2FailedJobs
-);
+entity::setM2FailedJobs);
-case "M3" -> applyModelFields(
+case "M3" ->
+applyModelFields(
 request,
 entity::setM3ModelBatchId,
 entity::setM3ModelStartDttm,
 entity::setM3ModelEndDttm,
 entity::setM3PendingJobs,
 entity::setM3RunningJobs,
 entity::setM3CompletedJobs,
-entity::setM3FailedJobs
-);
+entity::setM3FailedJobs);
 default -> throw new IllegalArgumentException("Unknown type: " + request.getType());
 }
 }
 private void applyModelFields(
 SaveInferenceAiDto request,
 Consumer<Long> setBatchId,
 Consumer<ZonedDateTime> setStart,
 Consumer<ZonedDateTime> setEnd,
 Consumer<Integer> setPending,
 Consumer<Integer> setRunning,
 Consumer<Integer> setCompleted,
-Consumer<Integer> setFailed
-) {
+Consumer<Integer> setFailed) {
 applyIfNotNull(request.getBatchId(), setBatchId);
 applyIfNotNull(request.getModelStartDttm(), setStart);
 applyIfNotNull(request.getModelEndDttm(), setEnd);
@@ -331,7 +331,7 @@ public class InferenceResultCoreService {
 public InferenceBatchSheet getInferenceResultByStatus(String status) {
 MapSheetLearnEntity entity =
 mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
 if (entity == null) {
 return null;
@@ -377,7 +377,7 @@ public class InferenceResultCoreService {
 /**
 * @param compareYear 비교년도
 * @param targetYear 기준년도
 * @return
 */
 public Integer getLearnStage(Integer compareYear, Integer targetYear) {

MapSheetInferenceJobService.java

@@ -47,9 +47,7 @@ public class MapSheetInferenceJobService {
 @Value("${inference.url}")
 private String inferenceUrl;
-/**
- * 추론 진행 배치 1분
- */
+/** 추론 진행 배치 1분 */
 @Scheduled(fixedDelay = 60_000)
 @Transactional
 public void runBatch() {
@@ -132,7 +130,7 @@ public class MapSheetInferenceJobService {
 String url = batchUrl + "/" + batchId;
 ExternalCallResult<String> result =
 externalHttpClient.call(url, HttpMethod.GET, null, jsonHeaders(), String.class);
 int status = result.statusCode();
 if (status < 200 || status >= 300) {
@@ -266,23 +264,28 @@ public class MapSheetInferenceJobService {
 // 추론 실행 api 파라미터 조회
 InferenceProgressDto progressDto =
 inferenceResultCoreService.getInferenceAiResultById(id, modelUuid);
 // ai 에 맞는 모델 명으로 변경
 String inferenceType = modelToInferenceType(type);
-InferenceSendDto.pred_requests_areas predRequestsAreas = new InferenceSendDto.pred_requests_areas();
+InferenceSendDto.pred_requests_areas predRequestsAreas =
+new InferenceSendDto.pred_requests_areas();
 predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
 predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year());
-predRequestsAreas.setInput1_scene_path(progressDto.getPred_requests_areas().getInput1_scene_path());
-predRequestsAreas.setInput2_scene_path(progressDto.getPred_requests_areas().getInput2_scene_path());
+predRequestsAreas.setInput1_scene_path(
+progressDto.getPred_requests_areas().getInput1_scene_path());
+predRequestsAreas.setInput2_scene_path(
+progressDto.getPred_requests_areas().getInput2_scene_path());
 InferenceSendDto m = new InferenceSendDto();
 m.setPred_requests_areas(predRequestsAreas);
 m.setModel_version(progressDto.getModelVersion());
 m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName());
-m.setCd_model_config(progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
-m.setCls_model_path(progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
+m.setCd_model_config(
+progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
+m.setCls_model_path(
+progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
 m.setCls_model_version(progressDto.getClsModelVersion());
 m.setCd_model_type(inferenceType);
 m.setPriority(progressDto.getPriority());
@@ -353,7 +356,7 @@ public class MapSheetInferenceJobService {
 headers.setAccept(List.of(MediaType.APPLICATION_JSON));
 ExternalCallResult<String> result =
 externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);
 if (result.statusCode() < 200 || result.statusCode() >= 300) {
 log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
@@ -363,8 +366,7 @@ public class MapSheetInferenceJobService {
 // 4) 응답 파싱
 try {
 List<Map<String, Object>> list =
-objectMapper.readValue(result.body(), new TypeReference<>() {
-});
+objectMapper.readValue(result.body(), new TypeReference<>() {});
 if (list.isEmpty()) {
 throw new IllegalStateException("Inference response is empty");
@@ -392,10 +394,9 @@ public class MapSheetInferenceJobService {
return "local".equalsIgnoreCase(profile); return "local".equalsIgnoreCase(profile);
} }
/** /** 모델별 추론 종료 update */
* 모델별 추론 종료 update private void updateProcessingEndTimeByModel(
*/ JobStatusDto dto, UUID uuid, ZonedDateTime dateTime, String type) {
private void updateProcessingEndTimeByModel(JobStatusDto dto, UUID uuid, ZonedDateTime dateTime, String type) {
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto(); SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(uuid); saveInferenceAiDto.setUuid(uuid);
saveInferenceAiDto.setUpdateUid(0L); saveInferenceAiDto.setUpdateUid(0L);