[KC-116] Fix shp file generation
@@ -53,7 +53,9 @@ public class MapSheetInferenceJobService {
    @Value("${inference.jar-path}")
    private String jarPath;

-    /** Inference progress batch (runs every 1 minute) */
+    /**
+     * Inference progress batch (runs every 1 minute)
+     */
    @Scheduled(fixedDelay = 60_000)
    public void runBatch() {
        if (isLocalProfile()) {
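For context, the isLocalProfile() guard is not part of this hunk; a later hunk in this diff shows the check return "local".equalsIgnoreCase(profile);. A minimal sketch of how that guard could be backed by the active Spring profile, assuming the profile string is injected with @Value (the field wiring itself is not shown in this commit):

    // Sketch only: assumes the active profile is injected from spring.profiles.active
    @Value("${spring.profiles.active:}")
    private String profile;

    private boolean isLocalProfile() {
        // Matches the check visible further down in this diff
        return "local".equalsIgnoreCase(profile);
    }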
@@ -138,7 +140,7 @@ public class MapSheetInferenceJobService {
        String url = batchUrl + "/" + batchId;

        ExternalCallResult<String> result =
                externalHttpClient.call(url, HttpMethod.GET, null, jsonHeaders(), String.class);

        int status = result.statusCode();
        if (status == 404) {
@@ -220,7 +222,7 @@ public class MapSheetInferenceJobService {
            batchIds.add(sheet.getM3BatchId());

            List<InferenceResultsTestingDto.ShpDto> resultList =
                    inferenceResultCoreService.getInferenceResults(batchIds);
            String inferenceId = "";
            StringBuilder sb = new StringBuilder();
@@ -237,8 +239,10 @@ public class MapSheetInferenceJobService {
            String mapIds = sb.toString();
            String batchId = sheet.getM1BatchId() + "," + sheet.getM2BatchId() + "," + sheet.getM3BatchId();

            // Generate per-map-sheet shp and geojson files, keyed by uid
            externalJarRunner.run(jarPath, batchId, inferenceId, mapIds);

            // Generate merged shp and geojson files, keyed by uid
            externalJarRunner.run(jarPath, batchId, inferenceId, "");
        }
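ExternalJarRunner itself is not changed in this commit, so only its call sites are visible. As a rough sketch of what run(jarPath, batchId, inferenceId, mapIds) might do with ProcessBuilder, assuming the jar accepts those values as positional arguments (both the class body and the argument contract are assumptions here):

import java.io.IOException;

// Hypothetical sketch: the real ExternalJarRunner is not shown in this commit.
public class ExternalJarRunner {

    // Launches the shp/geojson generator jar; an empty mapIds is assumed to mean "merge all map sheets".
    public void run(String jarPath, String batchId, String inferenceId, String mapIds) {
        try {
            Process process =
                    new ProcessBuilder("java", "-jar", jarPath, batchId, inferenceId, mapIds)
                            .redirectErrorStream(true)
                            .start();
            int exitCode = process.waitFor();
            if (exitCode != 0) {
                throw new IllegalStateException("shp generation jar exited with code " + exitCode);
            }
        } catch (IOException e) {
            throw new IllegalStateException("Failed to start shp generation jar", e);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while waiting for shp generation jar", e);
        }
    }
}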
@@ -308,28 +312,28 @@ public class MapSheetInferenceJobService {

        // Look up the parameters for the inference execution API
        InferenceProgressDto progressDto =
                inferenceResultCoreService.getInferenceAiResultById(id, modelUuid);

        // Convert to the model name expected by the AI
        String inferenceType = modelToInferenceType(type);

        InferenceSendDto.pred_requests_areas predRequestsAreas =
                new InferenceSendDto.pred_requests_areas();
        predRequestsAreas.setInput1_year(progressDto.getPred_requests_areas().getInput1_year());
        predRequestsAreas.setInput2_year(progressDto.getPred_requests_areas().getInput2_year());
        predRequestsAreas.setInput1_scene_path(
                progressDto.getPred_requests_areas().getInput1_scene_path());
        predRequestsAreas.setInput2_scene_path(
                progressDto.getPred_requests_areas().getInput2_scene_path());

        InferenceSendDto m = new InferenceSendDto();
        m.setPred_requests_areas(predRequestsAreas);
        m.setModel_version(progressDto.getModelVersion());
        m.setCd_model_path(progressDto.getCdModelPath() + "/" + progressDto.getCdModelFileName());
        m.setCd_model_config(
                progressDto.getCdModelConfigPath() + "/" + progressDto.getCdModelConfigFileName());
        m.setCls_model_path(
                progressDto.getCdModelClsPath() + "/" + progressDto.getCdModelClsFileName());
        m.setCls_model_version(progressDto.getClsModelVersion());
        m.setCd_model_type(inferenceType);
        m.setPriority(progressDto.getPriority());
@@ -400,7 +404,7 @@ public class MapSheetInferenceJobService {
        headers.setAccept(List.of(MediaType.APPLICATION_JSON));

        ExternalCallResult<String> result =
                externalHttpClient.call(inferenceUrl, HttpMethod.POST, dto, headers, String.class);

        if (result.statusCode() < 200 || result.statusCode() >= 300) {
            log.error("Inference API failed. status={}, body={}", result.statusCode(), result.body());
@@ -410,7 +414,8 @@ public class MapSheetInferenceJobService {
        // 4) Parse the response
        try {
            List<Map<String, Object>> list =
-                    objectMapper.readValue(result.body(), new TypeReference<>() {});
+                    objectMapper.readValue(result.body(), new TypeReference<>() {
+                    });

            if (list.isEmpty()) {
                throw new IllegalStateException("Inference response is empty");
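The hunk above only reformats the Jackson TypeReference call; behavior is unchanged. For readers unfamiliar with the pattern, a small standalone example of deserializing a JSON array body into List<Map<String, Object>>; the sample JSON fields are hypothetical, since the inference API's response schema is not part of this commit:

import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TypeReferenceParseExample {

    public static void main(String[] args) throws Exception {
        ObjectMapper objectMapper = new ObjectMapper();
        // Hypothetical body; the real response fields are not shown in this commit
        String body = "[{\"batch_id\":\"b-1\",\"status\":\"RUNNING\"}]";

        // TypeReference preserves the generic type, so Jackson returns
        // List<Map<String, Object>> without unchecked casts at the call site
        List<Map<String, Object>> list =
                objectMapper.readValue(body, new TypeReference<>() {});

        if (list.isEmpty()) {
            throw new IllegalStateException("Inference response is empty");
        }
        System.out.println(list.get(0).get("status"));
    }
}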
@@ -438,9 +443,11 @@ public class MapSheetInferenceJobService {
        return "local".equalsIgnoreCase(profile);
    }

-    /** Update inference end time per model */
+    /**
+     * Update inference end time per model
+     */
    private void updateProcessingEndTimeByModel(
            JobStatusDto dto, UUID uuid, ZonedDateTime dateTime, String type) {
        SaveInferenceAiDto saveInferenceAiDto = new SaveInferenceAiDto();
        saveInferenceAiDto.setUuid(uuid);
        saveInferenceAiDto.setUpdateUid(0L);
@@ -453,9 +460,9 @@ public class MapSheetInferenceJobService {
        inferenceResultCoreService.update(saveInferenceAiDto);

        List<Long> failedIds =
                Optional.ofNullable(dto.getFailedIds()).orElse(List.of()).stream()
                        .map(Long::valueOf)
                        .toList();

        // Update failure status per map sheet
        inferenceResultCoreService.saveFail5k(uuid, failedIds, type);
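The Optional.ofNullable(...).orElse(List.of()) chain above is what keeps the failed-ID conversion null-safe when the job status carries no failures. A tiny standalone illustration of the same pattern, with hypothetical input:

import java.util.List;
import java.util.Optional;

public class FailedIdsExample {

    public static void main(String[] args) {
        // Hypothetical payload: failed IDs arrive as strings and may be null
        List<String> rawFailedIds = null;

        // Null collapses to an empty list, so the stream never sees null
        List<Long> failedIds =
                Optional.ofNullable(rawFailedIds).orElse(List.of()).stream()
                        .map(Long::valueOf)
                        .toList();

        System.out.println(failedIds); // prints []
    }
}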