Add a permanent-exclusion condition for inference execution

Add the first 8 characters of the inference result UID
Status changes: 종료 (End) -> 완료 (Completed), 종료 (End) -> 강제종료 (Forced End)
2026-01-21 19:05:03 +09:00
parent e70307b753
commit c70abbdb6d
11 changed files with 159 additions and 29 deletions

View File

@@ -3,15 +3,22 @@ package com.kamco.cd.kamcoback.common.api;
import com.kamco.cd.kamcoback.common.api.HelloDto.Res;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.common.service.HelloService;
import io.swagger.v3.oas.annotations.Hidden;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient;
import com.kamco.cd.kamcoback.config.resttemplate.ExternalHttpClient.ExternalCallResult;
import io.swagger.v3.oas.annotations.Parameter;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Hidden
@Log4j2
@RequiredArgsConstructor
@RestController
@RequestMapping("/api/hello")
@@ -19,6 +26,7 @@ public class HelloApiController {
private final HelloService helloService;
private final ExternalJarRunner externalJarRunner;
private final ExternalHttpClient externalHttpClient;
@GetMapping
public HelloDto.Res hello(HelloDto.Req req) {
@@ -40,4 +48,24 @@ public class HelloApiController {
String mapIds) {
externalJarRunner.run(jarPath, batchIds, inferenceId, mapIds);
}
@GetMapping("/batch/{batchId}")
public String batch(@PathVariable String batchId) {
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.setAccept(List.of(MediaType.APPLICATION_JSON));
String url = "http://10.100.0.11:8000/batches/" + batchId;
ExternalCallResult<String> result =
externalHttpClient.call(url, HttpMethod.GET, null, headers, String.class);
int status = result.statusCode();
if (status == 404) {
log.info("Batch not found. batchId={}", batchId);
return null;
}
if (status < 200 || status >= 300) {
return null;
}
return result.toString();
}
}
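For reference, a minimal client-side sketch of calling the new lookup endpoint; the path comes from the controller above, while the host, port, and sample id are assumptions:

// Hypothetical call to GET /api/hello/batch/{batchId}; host, port, and the id value are placeholders.
org.springframework.web.client.RestTemplate rest = new org.springframework.web.client.RestTemplate();
String body = rest.getForObject("http://localhost:8080/api/hello/batch/{batchId}", String.class, "123");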

View File

@@ -13,7 +13,7 @@ import org.springframework.stereotype.Component;
@Component
public class ExternalJarRunner {
private static final long TIMEOUT_MINUTES = 30;
private static final long TIMEOUT_MINUTES = TimeUnit.DAYS.toMinutes(3);
/**
* shp file generation

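As a quick check on the new constant (not part of the diff): TimeUnit.DAYS.toMinutes(3) resolves to 4320, so the external JAR timeout grows from a flat 30 minutes to 3 days.

// 3 days * 24 h * 60 min = 4320 minutes; the previous value was a flat 30.
long timeoutMinutes = java.util.concurrent.TimeUnit.DAYS.toMinutes(3); // 4320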
View File

@@ -13,6 +13,7 @@ import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@@ -42,4 +43,15 @@ public class InferenceResultShpApiController {
@PathVariable Long learnId) {
return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
}
@Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
@PostMapping("/shp/{uid}")
public ApiResponseDto<Void> createShp(
@PathVariable String uid,
@RequestParam Long m1BatchId,
@RequestParam Long m2BatchId,
@RequestParam Long m3BatchId) {
inferenceResultShpService.createShp(uid, m1BatchId, m2BatchId, m3BatchId);
return ApiResponseDto.createOK(null);
}
}
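A minimal sketch of invoking the new endpoint; only the "/shp/{uid}" segment and the three request parameters come from the diff, while the controller's base path, host, port, and sample values are assumptions:

// Hypothetical call: POST {basePath}/shp/{uid}?m1BatchId=..&m2BatchId=..&m3BatchId=..
org.springframework.web.client.RestTemplate rest = new org.springframework.web.client.RestTemplate();
String url = "http://localhost:8080/api/inference-result/shp/{uid}"
        + "?m1BatchId={m1}&m2BatchId={m2}&m3BatchId={m3}";
rest.postForEntity(url, null, Void.class, "3f2b8c1a", 101L, 102L, 103L);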

View File

@@ -304,6 +304,7 @@ public class InferenceDetailDto {
Double classAfterProb;
Long mapSheetNum;
String mapSheetName;
String subUid;
// @JsonIgnore String gemoStr;
// @JsonIgnore String geomCenterStr;
@@ -321,7 +322,8 @@ public class InferenceDetailDto {
String classAfterCd,
Double classAfterProb,
Long mapSheetNum,
String mapSheetName) {
String mapSheetName,
String subUid) {
this.uuid = uuid;
this.uid = uid;
this.compareYyyy = compareYyyy;
@@ -335,6 +337,7 @@ public class InferenceDetailDto {
this.classAfterProb = classAfterProb;
this.mapSheetNum = mapSheetNum;
this.mapSheetName = mapSheetName;
this.subUid = subUid;
// this.gemoStr = gemoStr;
// this.geomCenterStr = geomCenterStr;
//
@@ -440,6 +443,7 @@ public class InferenceDetailDto {
@JsonFormatDttm private ZonedDateTime inferEndDttm;
private Integer stage;
private String elapsedDuration;
private String subUid;
public AnalResultInfo(
String analTitle,
@@ -452,7 +456,8 @@ public class InferenceDetailDto {
String mapSheetScope,
ZonedDateTime inferStartDttm,
ZonedDateTime inferEndDttm,
Integer stage) {
Integer stage,
String subUid) {
this.analTitle = analTitle;
this.modelVer1 = modelVer1;
this.modelVer2 = modelVer2;
@@ -464,6 +469,7 @@ public class InferenceDetailDto {
this.inferStartDttm = inferStartDttm;
this.inferEndDttm = inferEndDttm;
this.stage = stage;
this.subUid = subUid;
Duration elapsed =
(inferStartDttm != null && inferEndDttm != null)
? Duration.between(inferStartDttm, inferEndDttm)

View File

@@ -85,8 +85,8 @@ public class InferenceResultDto {
public enum Status implements EnumType {
READY("대기"),
IN_PROGRESS("진행중"),
END("종료"),
;
END("완료"),
FORCED_END("강제종료");
private final String desc;
public static Status fromCode(String code) {

View File

@@ -558,11 +558,11 @@ public class InferenceResultService {
externalHttpClient.call(url, HttpMethod.DELETE, dto, headers, String.class);
if (!result.success()) {
log.warn("Failed to delete inference result");
throw new CustomApiException("BAD_GATEWAY", HttpStatus.BAD_GATEWAY);
}
SaveInferenceAiDto request = new SaveInferenceAiDto();
request.setStatus(Status.END.getId());
request.setStatus(Status.FORCED_END.getId());
request.setUuid(dto.getUuid());
request.setUpdateUid(userUtil.getId());
request.setInferEndDttm(ZonedDateTime.now());

View File

@@ -1,7 +1,12 @@
package com.kamco.cd.kamcoback.inference.service;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultsTestingDto;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultShpCoreService;
import com.kamco.cd.kamcoback.scheduler.service.ShpPipelineService;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -13,13 +18,49 @@ import org.springframework.transaction.annotation.Transactional;
public class InferenceResultShpService {
private final InferenceResultShpCoreService coreService;
private final InferenceResultCoreService inferenceResultCoreService;
private final ShpPipelineService shpPipelineService;
@Value("${mapsheet.shp.baseurl}")
private String baseDir;
@Value("${inference.jar-path}")
private String jarPath;
@Value("${file.dataset-dir}")
private String datasetDir;
/** Brings the analysis result and geometry tables up to date based on the inference_results table. */
@Transactional
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(Long id) {
return coreService.buildInferenceData(id);
}
public void createShp(String uid, Long m1BatchId, Long m2BatchId, Long m3BatchId) {
List<Long> batchIds = new ArrayList<>();
batchIds.add(m1BatchId);
batchIds.add(m2BatchId);
batchIds.add(m3BatchId);
List<InferenceResultsTestingDto.ShpDto> resultList =
inferenceResultCoreService.getInferenceResults(batchIds);
String inferenceId = uid;
StringBuilder sb = new StringBuilder();
for (InferenceResultsTestingDto.ShpDto dto : resultList) {
if (dto.getMapId() == null) {
continue;
}
if (!sb.isEmpty()) {
sb.append(",");
}
sb.append("\"").append(dto.getMapId()).append("\"");
}
String mapIds = sb.toString();
String batchId = m1BatchId + "," + m2BatchId + "," + m3BatchId;
// generate shp files asynchronously
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, inferenceId, mapIds);
}
}
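As a side note, the quoted comma-join built above with a StringBuilder could also be expressed with streams; a sketch assuming only the ShpDto.getMapId() accessor used in the diff:

// Sketch: produces the same quoted, comma-separated string over non-null map ids.
private String joinQuotedMapIds(java.util.List<InferenceResultsTestingDto.ShpDto> resultList) {
    return resultList.stream()
            .map(InferenceResultsTestingDto.ShpDto::getMapId)
            .filter(java.util.Objects::nonNull)
            .map(id -> "\"" + id + "\"")
            .collect(java.util.stream.Collectors.joining(","));
}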

View File

@@ -2,7 +2,6 @@ package com.kamco.cd.kamcoback.menu;
import com.kamco.cd.kamcoback.common.utils.UserUtil;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.menu.dto.MenuDto;
import com.kamco.cd.kamcoback.menu.dto.MyMenuDto;
import com.kamco.cd.kamcoback.menu.service.MyMenuService;
import io.swagger.v3.oas.annotations.Operation;
@@ -26,15 +25,45 @@ public class MyMenuApiController {
private final MyMenuService myMenuService;
@Operation(summary = "사용자별 메뉴 조회", description = "로그인 사용자별 권한 메뉴 목록")
@ApiResponses(
value = {
@ApiResponses({
@ApiResponse(
responseCode = "200",
description = "조회 성공",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = MenuDto.Basic.class))),
schema =
@Schema(
type = "object",
example =
"""
{
"data": [
{
"id": "string",
"name": "string",
"menuUrl": null,
"order": 0,
"children": [
{
"id": "string",
"name": "string",
"menuUrl": "string",
"order": 0,
"children": []
},
{
"id": "string",
"name": "string",
"menuUrl": "string",
"order": 0,
"children": []
}
]
}
]
}
"""))),
@ApiResponse(responseCode = "404", description = "코드를 찾을 수 없음", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})

View File

@@ -311,7 +311,8 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
mapSheetLearnEntity.mapSheetScope,
mapSheetLearnEntity.inferStartDttm,
mapSheetLearnEntity.inferEndDttm,
mapSheetLearnEntity.stage))
mapSheetLearnEntity.stage,
Expressions.stringTemplate("substring({0} from 1 for 8)", mapSheetLearnEntity.uid)))
.from(mapSheetLearnEntity)
.leftJoin(m1)
.on(mapSheetLearnEntity.m1ModelUuid.eq(m1.uuid))
@@ -426,7 +427,10 @@ public class MapSheetLearnRepositoryImpl implements MapSheetLearnRepositoryCusto
mapSheetAnalDataInferenceGeomEntity.classAfterCd,
mapSheetAnalDataInferenceGeomEntity.classAfterProb,
mapSheetAnalDataInferenceGeomEntity.mapSheetNum,
mapInkx5kEntity.mapidNm))
mapInkx5kEntity.mapidNm,
Expressions.stringTemplate(
"substring({0} from 1 for 8)",
mapSheetAnalDataInferenceGeomEntity.resultUid)))
.from(mapSheetAnalInferenceEntity)
.join(mapSheetAnalDataInferenceEntity)
.on(mapSheetAnalDataInferenceEntity.analUid.eq(mapSheetAnalInferenceEntity.id))
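For reference, the stringTemplate above renders PostgreSQL's substring(uid from 1 for 8), i.e. the first eight characters of the UID. An equivalent for an in-memory value (a sketch, not part of the commit; null and short-string handling mirrors the SQL function):

// e.g. "3f2b8c1a-9d4e-..." -> "3f2b8c1a"; shorter or null uids pass through unchanged.
static String subUidOf(String uid) {
    return (uid == null || uid.length() <= 8) ? uid : uid.substring(0, 8);
}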

View File

@@ -7,6 +7,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetMngHstEntity.mapSh
import static com.kamco.cd.kamcoback.postgres.entity.QYearEntity.yearEntity;
import static com.querydsl.core.types.dsl.Expressions.nullExpression;
import com.kamco.cd.kamcoback.common.enums.CommonUseStatus;
import com.kamco.cd.kamcoback.common.geometry.GeoJsonFileWriter.ImageFeature;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultDto.MapSheetScope;
@@ -531,6 +532,12 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
nullExpression(Integer.class),
nullExpression(Boolean.class)))
.from(mapSheetMngHstEntity)
.innerJoin(mapInkx5kEntity)
.on(
mapInkx5kEntity
.mapidcdNo
.eq(mapSheetMngHstEntity.mapSheetNum)
.and(mapInkx5kEntity.useInference.eq(CommonUseStatus.USE)))
.where(whereBuilder)
.fetch();
}
@@ -580,6 +587,8 @@ public class MapSheetMngRepositoryImpl extends QuerydslRepositorySupport
ORDER BY x::int DESC
LIMIT 1
) s ON true
INNER JOIN tb_map_inkx_5k tmik
ON t.map_sheet_num = tmik.mapidcd_no AND tmik.use_inference = 'USE'
WHERE t.map_sheet_num = ANY(:mapIds)
""";

View File

@@ -26,16 +26,17 @@ public class ShpPipelineService {
}
try {
// 1. generate per-map-sheet shp and geojson files keyed by uid
externalJarRunner.run(jarPath, batchId, inferenceId, mapIds);
// 2. generate merged shp and geojson files keyed by uid
// generate merged shp and geojson files keyed by uid
externalJarRunner.run(jarPath, batchId, inferenceId, "");
// 3. register the shp file with geoserver, keyed by uid
// register the shp file with geoserver, keyed by uid
String register = datasetDir + "/" + inferenceId + "/merge/" + inferenceId + ".shp";
externalJarRunner.run(jarPath, register, inferenceId);
// generate per-map-sheet shp and geojson files keyed by uid
// TODO revisit per-map-sheet generation after checking its performance
// externalJarRunner.run(jarPath, batchId, inferenceId, mapIds);
log.info("SHP pipeline finished. inferenceId={}", inferenceId);
} catch (Exception e) {