Merge pull request 'feat/infer_dev_260107' (#203) from feat/infer_dev_260107 into develop

Reviewed-on: https://kamco.gitea.gs.dabeeo.com/dabeeo/kamco-dabeeo-backoffice/pulls/203
2026-01-12 21:08:58 +09:00
14 changed files with 402 additions and 22 deletions

View File

@@ -3,6 +3,7 @@ package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.service.InferenceResultService;
import com.kamco.cd.kamcoback.mapsheet.service.MapSheetMngService;
@@ -23,6 +24,7 @@ import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
@@ -237,4 +239,28 @@ public class InferenceResultApiController {
return ApiResponseDto.ok(inferenceResultService.getInferenceServerStatusList());
}
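// Progress-detail endpoint: returns the progress summary of a single inference run, addressed by the UUID path variable.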
@Operation(summary = "추론관리 진행현황 상세", description = "어드민 홈 > 추론관리 > 추론관리 > 진행현황 상세")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "200",
description = "검색 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = InferenceStatusDetailDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@GetMapping("/status/{uuid}")
public ApiResponseDto<InferenceStatusDetailDto> getInferenceStatus(
@io.swagger.v3.oas.annotations.Parameter(description = "추론 진행현황 UUID", required = true)
@PathVariable
UUID uuid) {
return ApiResponseDto.ok(inferenceResultService.getInferenceStatus(uuid));
}
}

View File

@@ -391,4 +391,17 @@ public class InferenceDetailDto {
return PageRequest.of(page, size);
}
}
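// Snapshot of the batch ids registered per model stage (M1/M2/M3) plus the run status; consumed by the scheduler when polling the external batch API.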
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public static class InferenceBatchSheet {
private Long m1BatchId;
private Long m2BatchId;
private Long m3BatchId;
private String status;
private String runningModelType;
}
}

View File

@@ -129,10 +129,7 @@ public class InferenceResultDto {
}
public static Status fromCode(String code) {
-return Arrays.stream(values())
-    .filter(v -> v.name().equals(code))
-    .findFirst()
-    .orElseThrow(() -> new IllegalArgumentException("알 수 없는 상태 코드: " + code));
+return Arrays.stream(values()).filter(v -> v.name().equals(code)).findFirst().orElse(null);
}
public static String getDescByCode(String code) {
@@ -198,6 +195,69 @@ public class InferenceResultDto {
private String mapSheetName;
}
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public static class InferenceStatusDetailDto {
private String title;
private Integer compareYyyy;
private Integer targetYyyy;
private String detectOption;
private String mapSheetScope;
@JsonFormatDttm private ZonedDateTime inferStartDttm;
@JsonFormatDttm private ZonedDateTime inferEndDttm;
private Long detectingCnt;
private String model1Ver;
private String model2Ver;
private String model3Ver;
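// Explicit 11-argument constructor: it must line up with the Projections.constructor(...) call in MapSheetLearnRepositoryImpl; the fields declared after it are filled in separately (usedServerName by the service, the rest currently default values).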
public InferenceStatusDetailDto(
String title,
Integer compareYyyy,
Integer targetYyyy,
String detectOption,
String mapSheetScope,
ZonedDateTime inferStartDttm,
ZonedDateTime inferEndDttm,
Long detectingCnt,
String model1Ver,
String model2Ver,
String model3Ver) {
this.title = title;
this.compareYyyy = compareYyyy;
this.targetYyyy = targetYyyy;
this.detectOption = detectOption;
this.mapSheetScope = mapSheetScope;
this.inferStartDttm = inferStartDttm;
this.inferEndDttm = inferEndDttm;
this.detectingCnt = detectingCnt;
this.model1Ver = model1Ver;
this.model2Ver = model2Ver;
this.model3Ver = model3Ver;
}
private String usedServerName;
private Long detectingEndCnt = 7L;
private String model1VerStatus = "PROCCESING";
private String model1VerStatusName = "진행중";
private String model2VerStatus = "PROCCESING";
private String model2VerStatusName = "진행중";
private String model3VerStatus = "PROCCESING";
private String model4VerStatusName = "진행중";
public String getDetectOptionName() {
if (this.detectOption.equals("EXCL")) return "추론제외";
return "이전 년도 도엽 사용";
}
public String getMapSheetScopeName() {
if (this.detectOption.equals("ALL")) return "전체";
return "부분";
}
}
@Getter
@Setter
@NoArgsConstructor
@@ -236,34 +296,46 @@ public class InferenceResultDto {
public String getCpuStatus() {
String enumId = "SAFETY";
if (this.cpu_user + this.cpu_system >= 80) enumId = "CAUTION";
if (this.cpu_user + this.cpu_system >= 80) {
enumId = "CAUTION";
}
return enumId;
}
public String getGpuStatus() {
String enumId = "SAFETY";
if (this.gpuUtil >= 80) enumId = "CAUTION";
if (this.gpuUtil >= 80) {
enumId = "CAUTION";
}
return enumId;
}
public String getMemStatus() {
String enumId = "SAFETY";
if (this.memused >= 80) enumId = "CAUTION";
if (this.memused >= 80) {
enumId = "CAUTION";
}
return enumId;
}
public String getCpuStatusName() {
-if (this.cpu_user + this.cpu_system >= 80) return ServerStatus.CAUTION.getText();
+if (this.cpu_user + this.cpu_system >= 80) {
+return ServerStatus.CAUTION.getText();
+}
return ServerStatus.SAFETY.getText();
}
public String getGpuStatusName() {
-if (this.gpuUtil >= 80) return ServerStatus.CAUTION.getText();
+if (this.gpuUtil >= 80) {
+return ServerStatus.CAUTION.getText();
+}
return ServerStatus.SAFETY.getText();
}
public String getMemStatusName() {
-if (this.memused >= 80) return ServerStatus.CAUTION.getText();
+if (this.memused >= 80) {
+return ServerStatus.CAUTION.getText();
+}
return ServerStatus.SAFETY.getText();
}
}
@@ -288,4 +360,17 @@ public class InferenceResultDto {
return desc;
}
}
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
public static class SaveInferenceAiDto {
private UUID uuid;
private Long batchId;
private String status;
private String type;
private ZonedDateTime inferStartDttm;
}
}

View File

@@ -12,9 +12,12 @@ import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.DetectOption;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceSendDto.pred_requests_areas;
import com.kamco.cd.kamcoback.mapsheet.dto.MapSheetMngDto.MngListCompareDto;
@@ -24,6 +27,7 @@ import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.MapSheetMngCoreService;
import com.kamco.cd.kamcoback.postgres.core.ModelMngCoreService;
import jakarta.validation.constraints.NotNull;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -199,7 +203,14 @@ public class InferenceResultService {
m3.setPred_requests_areas(predRequestsAreas);
Long batchId = this.ensureAccepted(m1);
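// Record the accepted batch on the run so the scheduler can poll it; "M1" marks which model stage this batch id belongs to (assumption: M1 is the first stage, M2/M3 are chained later).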
inferenceResultCoreService.update(uuid, batchId, "IN_PROGRESS");
SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
saveInferenceAiDto.setUuid(uuid);
saveInferenceAiDto.setBatchId(batchId);
saveInferenceAiDto.setStatus(Status.IN_PROGRESS.getId());
saveInferenceAiDto.setType("M1");
saveInferenceAiDto.setInferStartDttm(ZonedDateTime.now());
inferenceResultCoreService.update(saveInferenceAiDto);
}
/**
@@ -391,4 +402,21 @@ public class InferenceResultService {
public List<InferenceServerStatusDto> getInferenceServerStatusList() {
return inferenceResultCoreService.getInferenceServerStatusList();
}
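// Progress detail plus a comma-separated list of the inference servers currently reporting metrics.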
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
List<InferenceServerStatusDto> servers =
inferenceResultCoreService.getInferenceServerStatusList();
String serverNames = "";
for (InferenceServerStatusDto server : servers) {
if (serverNames.equals("")) serverNames = server.getServerName();
else serverNames = serverNames + "," + server.getServerName();
}
InferenceStatusDetailDto dto = inferenceResultCoreService.getInferenceStatus(uuid);
dto.setUsedServerName(serverNames);
return dto;
}
}

View File

@@ -86,8 +86,24 @@ public class ModelMngService {
modelMetricAddReq.setLoss(0);
modelMetricAddReq.setIou(0);
ModelUploadResDto modelUploadResDto = new ModelUploadResDto();
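// Unzip the uploaded archive right away and resolve the CD/CLS model and config file locations from its contents.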
try {
FIleChecker.unzip(addReq.getFileName(), addReq.getFilePath());
this.getUnzipModelFiles(addReq.getFilePath(), modelUploadResDto);
addReq.setCdModelPath(modelUploadResDto.getCdModelPath());
addReq.setCdModelFileName(modelUploadResDto.getCdModelFileName());
addReq.setCdModelConfigPath(modelUploadResDto.getCdModelConfigPath());
addReq.setCdModelConfigFileName(modelUploadResDto.getCdModelConfigFileName());
addReq.setClsModelPath(modelUploadResDto.getClsModelPath());
addReq.setClsModelFileName(modelUploadResDto.getClsModelFileName());
} catch (IOException e) {
throw new RuntimeException(e);
}
ObjectMapper mapper = new ObjectMapper();
-String filePath = addReq.getClsModelPath() + addReq.getClsModelFileName();
+String filePath = modelUploadResDto.getClsModelPath() + modelUploadResDto.getClsModelFileName();
String dataJson = null;
try {
dataJson = Files.readString(Path.of(filePath));
@@ -126,8 +142,9 @@ public class ModelMngService {
modelUploadResDto.setFileName(upRes.getFileName());
modelUploadResDto.setChunkIndex(upRes.getChunkIndex());
modelUploadResDto.setChunkTotalIndex(upRes.getChunkTotalIndex());
// 압축풀기 (String zipFilePath, String destDirectory)
/*
if (upRes.getChunkIndex() == upRes.getChunkTotalIndex()) {
try {
FIleChecker.unzip(upRes.getFileName(), upRes.getFilePath());
@@ -138,6 +155,7 @@ public class ModelMngService {
throw new RuntimeException(e);
}
}
*/
return modelUploadResDto;
}

View File

@@ -4,11 +4,14 @@ import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.Dashboard;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.MapSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetNumDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.ResultList;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.SaveInferenceAiDto;
import com.kamco.cd.kamcoback.postgres.entity.MapInkx5kEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceEntity;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetAnalDataInferenceGeomEntity;
@@ -218,17 +221,46 @@ public class InferenceResultCoreService {
.toList();
}
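// Stores the batch id under the column matching the model stage carried in the request (M1/M2/M3) and updates the run status and start time.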
-public void update(UUID uuid, Long batchId, String status) {
+public void update(SaveInferenceAiDto request) {
MapSheetLearnEntity entity =
mapSheetLearnRepository
-.getInferenceResultByUuid(uuid)
-.orElseThrow(() -> new EntityNotFoundException(uuid.toString()));
+.getInferenceResultByUuid(request.getUuid())
+.orElseThrow(() -> new EntityNotFoundException(request.getUuid().toString()));
-entity.setBatchId(batchId);
-entity.setStatus(status);
if (request.getType().equals("M1")) {
entity.setM1ModelBatchId(request.getBatchId());
} else if (request.getType().equals("M2")) {
entity.setM2ModelBatchId(request.getBatchId());
} else if (request.getType().equals("M3")) {
entity.setM3ModelBatchId(request.getBatchId());
}
entity.setStatus(request.getStatus());
if (request.getInferStartDttm() != null) {
entity.setInferStartDttm(request.getInferStartDttm());
}
}
public List<InferenceServerStatusDto> getInferenceServerStatusList() {
return mapSheetLearnRepository.getInferenceServerStatusList();
}
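// Looks up the single run in the given status and exposes only the batch ids and status fields the scheduler needs.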
public InferenceBatchSheet getInferenceResultByStatus(String status) {
MapSheetLearnEntity entity =
mapSheetLearnRepository.getInferenceResultByStatus(status).orElse(null);
// Return null when no run is in the given status, matching the scheduler's null check.
if (entity == null) {
return null;
}
InferenceBatchSheet inferenceBatchSheet = new InferenceBatchSheet();
inferenceBatchSheet.setM1BatchId(entity.getM1ModelBatchId());
inferenceBatchSheet.setM2BatchId(entity.getM2ModelBatchId());
inferenceBatchSheet.setM3BatchId(entity.getM3ModelBatchId());
inferenceBatchSheet.setStatus(entity.getStatus());
inferenceBatchSheet.setRunningModelType(entity.getRunningModelType());
return inferenceBatchSheet;
}
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
return mapSheetLearnRepository.getInferenceStatus(uuid);
}
}

View File

@@ -103,8 +103,20 @@ public class MapSheetLearnEntity {
@Column(name = "updated_uid")
private Long updatedUid;
@Column(name = "batch_id")
private Long batchId;
@Column(name = "running_model_type")
private String runningModelType;
@Column(name = "m1_model_batch_id")
private Long m1ModelBatchId;
@Column(name = "m2_model_batch_id")
private Long m2ModelBatchId;
@Column(name = "m3_model_batch_id")
private Long m3ModelBatchId;
@Column(name = "detect_end_cnt")
private Long detectEndCnt;
public InferenceResultDto.ResultList toDto() {
return new InferenceResultDto.ResultList(

View File

@@ -2,6 +2,7 @@ package com.kamco.cd.kamcoback.postgres.repository.Inference;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import java.util.List;
import java.util.Optional;
@@ -15,4 +16,6 @@ public interface MapSheetLearnRepositoryCustom {
Optional<MapSheetLearnEntity> getInferenceResultByUuid(UUID uuid);
List<InferenceServerStatusDto> getInferenceServerStatusList();
public InferenceStatusDetailDto getInferenceStatus(UUID uuid);
}

View File

@@ -7,7 +7,10 @@ import static com.kamco.cd.kamcoback.postgres.entity.QSystemMetricEntity.systemM
import com.kamco.cd.kamcoback.common.utils.DateRange;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceServerStatusDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.InferenceStatusDetailDto;
import com.kamco.cd.kamcoback.model.service.ModelMngService;
import com.kamco.cd.kamcoback.postgres.entity.MapSheetLearnEntity;
import com.kamco.cd.kamcoback.postgres.entity.QModelMngEntity;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Projections;
import com.querydsl.core.types.dsl.CaseBuilder;
@@ -28,6 +31,7 @@ import org.springframework.stereotype.Repository;
public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCustom {
private final JPAQueryFactory queryFactory;
private final ModelMngService modelMngService;
@Override
public Page<MapSheetLearnEntity> getInferenceMgnResultList(InferenceResultDto.SearchListReq req) {
@@ -121,7 +125,11 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
gpuMetricEntity.gpuUtil))
.from(systemMetricEntity)
.leftJoin(gpuMetricEntity)
-.on(gpuMetricEntity.serverName.eq(systemMetricEntity.serverName))
+.on(
+gpuMetricEntity
+.id1
+.in(latestGpuIds)
+.and(gpuMetricEntity.serverName.eq(systemMetricEntity.serverName)))
.where(systemMetricEntity.id1.in(latestIds)) // In 절 사용
.orderBy(systemMetricEntity.serverName.asc())
.limit(4)
@@ -129,4 +137,42 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
return foundContent;
}
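// Left-joins the run against its three model entries (m1/m2/m3 model UUIDs) so each model version is returned alongside the run metadata.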
@Override
public InferenceStatusDetailDto getInferenceStatus(UUID uuid) {
String serverNames = "";
QModelMngEntity m1Model = new QModelMngEntity("m1Model");
QModelMngEntity m2Model = new QModelMngEntity("m2Model");
QModelMngEntity m3Model = new QModelMngEntity("m3Model");
InferenceStatusDetailDto foundContent =
queryFactory
.select(
Projections.constructor(
InferenceStatusDetailDto.class,
mapSheetLearnEntity.title,
mapSheetLearnEntity.compareYyyy,
mapSheetLearnEntity.targetYyyy,
mapSheetLearnEntity.detectOption,
mapSheetLearnEntity.mapSheetScope,
mapSheetLearnEntity.inferStartDttm,
mapSheetLearnEntity.inferEndDttm,
mapSheetLearnEntity.detectingCnt,
m1Model.modelVer.as("model1Ver"),
m2Model.modelVer.as("model2Ver"),
m3Model.modelVer.as("model3Ver")))
.from(mapSheetLearnEntity)
.leftJoin(m1Model)
.on(m1Model.uuid.eq(mapSheetLearnEntity.m1ModelUuid))
.leftJoin(m2Model)
.on(m2Model.uuid.eq(mapSheetLearnEntity.m2ModelUuid))
.leftJoin(m3Model)
.on(m3Model.uuid.eq(mapSheetLearnEntity.m3ModelUuid))
.where(mapSheetLearnEntity.uuid.eq(uuid))
.fetchOne();
return foundContent;
}
}

View File

@@ -0,0 +1,52 @@
package com.kamco.cd.kamcoback.scheduler.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.ZonedDateTime;
import java.util.List;
import lombok.Data;
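// Shape of the external batch-status response (GET {inference.batch-url}/{batchId}).
// Illustrative payload only, assuming the snake_case keys mapped below:
// {
//   "id": 12, "created_at": "2026-01-12T12:00:00+09:00",
//   "input1_year": 2024, "input2_year": 2025,
//   "total_jobs": 10, "pending_jobs": 2, "running_jobs": 3, "completed_jobs": 5, "failed_jobs": 0,
//   "status": "RUNNING", "jobs": [],
//   "completed_ids": [], "processing_ids": [], "queued_ids": [], "failed_ids": []
// }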
@Data
public class JobStatusDto {
private Long id;
@JsonProperty("created_at")
private ZonedDateTime createdAt;
@JsonProperty("input1_year")
private Integer input1Year;
@JsonProperty("input2_year")
private Integer input2Year;
@JsonProperty("total_jobs")
private Integer totalJobs;
@JsonProperty("pending_jobs")
private Integer pendingJobs;
@JsonProperty("running_jobs")
private Integer runningJobs;
@JsonProperty("completed_jobs")
private Integer completedJobs;
@JsonProperty("failed_jobs")
private Integer failedJobs;
private String status;
private List<Object> jobs;
@JsonProperty("completed_ids")
private List<String> completedIds;
@JsonProperty("processing_ids")
private List<String> processingIds;
@JsonProperty("queued_ids")
private List<String> queuedIds;
@JsonProperty("failed_ids")
private List<String> failedIds;
}

View File

@@ -1,8 +1,20 @@
package com.kamco.cd.kamcoback.scheduler.service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import com.kamco.cd.kamcoback.inference.dto.InferenceDetailDto.InferenceBatchSheet;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.Status;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.dto.JobStatusDto;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
@@ -12,6 +24,12 @@ import org.springframework.stereotype.Service;
public class MapSheetInferenceJobService {
private final InferenceResultCoreService inferenceResultCoreService;
private final ExternalHttpClient externalHttpClient;
private final ObjectMapper objectMapper;
@Value("${inference.batch-url}")
private String batchUrl;
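// Runs every minute: picks the run currently IN_PROGRESS, queries the external batch API for the most advanced batch id (M3, then M2, then M1), and parses the response into JobStatusDto.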
@Scheduled(fixedDelay = 60_000)
public void runBatch() {
@@ -19,8 +37,52 @@ public class MapSheetInferenceJobService {
try {
// TODO: 배치 로직 작성
InferenceBatchSheet batchSheet =
inferenceResultCoreService.getInferenceResultByStatus(Status.IN_PROGRESS.getId());
if (batchSheet == null) {
return;
}
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
Long batchId = 0L;
if (batchSheet.getM3BatchId() != null) {
batchId = batchSheet.getM3BatchId();
} else if (batchSheet.getM2BatchId() != null) {
batchId = batchSheet.getM2BatchId();
} else if (batchSheet.getM1BatchId() != null) {
batchId = batchSheet.getM1BatchId();
}
String url = batchUrl + "/" + batchId;
ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
int status = result.statusCode();
if (status < 200 || status >= 300) {
return;
}
String json = result.body();
JobStatusDto dto = objectMapper.readValue(json, JobStatusDto.class);
if ("COMPLETED".equals(dto.getStatus())) {
String type = batchSheet.getRunningModelType();
// if(type.equals("M3"))
}
log.info("배치 상태 조회 결과: {}", dto);
Thread.sleep(3000); // 예시: 처리 시간 3초
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
log.error("배치 중 인터럽트 발생", e);
} catch (JsonProcessingException e) {
log.error("배치 상태 응답 JSON 파싱 실패", e);
}

View File

@@ -97,4 +97,5 @@ file:
inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/

View File

@@ -83,4 +83,5 @@ file:
inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/

View File

@@ -58,5 +58,6 @@ file:
inference:
url: http://10.100.0.11:8000/jobs
batch-url: http://10.100.0.11:8000/batches
geojson-dir: /kamco-nfs/requests/