shp 생성 컨트롤러 이동, 주석추가

This commit is contained in:
2026-02-27 21:04:18 +09:00
parent 086eb20e8d
commit af2721949c
13 changed files with 181 additions and 96 deletions

View File

@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.scheduler;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
@@ -11,6 +13,7 @@ import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataReviewJobService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
@@ -18,6 +21,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
@@ -28,7 +32,7 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "스케줄링 수동 호출 테스트", description = "스케줄링 수동 호출 테스트 API")
@Tag(name = "스케줄링 및 jar 수동 호출 테스트", description = "스케줄링 및 jar 수동 호출 테스트 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/schedule")
@@ -42,6 +46,7 @@ public class SchedulerApiController {
private final TrainingDataReviewJobService trainingDataReviewJobService;
private final MemberInactiveJobService memberInactiveJobService;
private final MapSheetMngFileJobController mapSheetMngFileJobController;
private final InferenceResultShpService inferenceResultShpService;
private final GukYuinApiService gukYuinApiService;
@Operation(summary = "국유인 탐지객체 조회 PNU 업데이트 스케줄링", description = "국유인 탐지객체 조회 PNU 업데이트 스케줄링")
@@ -137,4 +142,37 @@ public class SchedulerApiController {
return ApiResponseDto.createOK("OK");
}
  /**
   * Persists inference result data for the given inference run.
   *
   * <p>Delegates to {@code inferenceResultShpService.saveInferenceResultData(uuid)} and wraps the
   * returned count DTO in the standard {@code ApiResponseDto} envelope.
   *
   * @param uuid inference run identifier (path variable)
   * @return OK envelope containing {@code InferenceResultShpDto.InferenceCntDto}
   */
  @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "201",
            description = "데이터 저장 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema =
                        @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
        @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @PostMapping("/save/inference/{uuid}")
  public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
      @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
          @PathVariable
          UUID uuid) {
    // NOTE(review): docs advertise 201, but createOK presumably maps to HTTP 200 — confirm
    // against ApiResponseDto.createOK before trusting the Swagger spec.
    return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(uuid));
  }
  /**
   * Manually triggers SHP file generation for the given inference run.
   *
   * <p>Fire-and-forget from the caller's perspective: delegates to
   * {@code inferenceResultShpService.createShp(uuid)} and returns an empty OK envelope.
   *
   * @param uuid inference run identifier (path variable)
   * @return OK envelope with no payload
   */
  @Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
  @PostMapping("/shp/inference/{uuid}")
  public ApiResponseDto<Void> createShp(
      @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
          @PathVariable
          UUID uuid) {
    // Manually generate the SHP file for this inference run.
    inferenceResultShpService.createShp(uuid);
    return ApiResponseDto.createOK(null);
  }
}

View File

@@ -214,7 +214,7 @@ public class MapSheetInferenceJobService {
// 현재 모델 종료 업데이트
updateProcessingEndTimeByModel(job, sheet.getUuid(), now, currentType);
// M3이면 전체 종료
// G3이면 전체 종료
if (ModelType.G3.getId().equals(currentType)) {
endAll(sheet, now);
return;
@@ -235,6 +235,13 @@ public class MapSheetInferenceJobService {
* @param now
*/
private void endAll(InferenceBatchSheet sheet, ZonedDateTime now) {
List<Long> batchIds =
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
.filter(Objects::nonNull)
.distinct()
.toList();
SaveInferenceAiDto save = new SaveInferenceAiDto();
save.setUuid(sheet.getUuid());
save.setStatus(Status.END.getId());
@@ -246,12 +253,6 @@ public class MapSheetInferenceJobService {
inferenceResultCoreService.upsertGeomData(sheet.getId());
// 추론 종료일때 shp 파일 생성
List<Long> batchIds =
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
.filter(Objects::nonNull)
.distinct()
.toList();
String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(","));
// shp 파일 비동기 생성

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.kamcoback.scheduler.service;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock;
import java.nio.file.Paths;
import lombok.RequiredArgsConstructor;
@@ -13,11 +14,15 @@ import org.springframework.stereotype.Service;
@RequiredArgsConstructor
public class ShpPipelineService {
private final InferenceResultCoreService inferenceResultCoreService;
private final ExternalJarRunner externalJarRunner;
private final ShpKeyLock shpKeyLock;
@Async("shpExecutor")
public void runPipeline(String jarPath, String datasetDir, String batchId, String inferenceId) {
public void runPipeline(String jarPath, String datasetDir, String batchIds, String inferenceId) {
//
// batchIds.split(",")
// inferenceResultCoreService.getInferenceResultCnt();
// inferenceId 기준 동시 실행 제한
if (!shpKeyLock.tryLock(inferenceId)) {
@@ -27,7 +32,7 @@ public class ShpPipelineService {
try {
// uid 기준 merge shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchId, inferenceId, "", "MERGED");
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED");
// uid 기준 shp 파일 geoserver 등록
String register =
@@ -36,7 +41,7 @@ public class ShpPipelineService {
externalJarRunner.run(jarPath, register, inferenceId);
// uid 기준 도엽별 shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchId, inferenceId, "", "RESOLVE");
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
log.info("SHP pipeline finished. inferenceId={}", inferenceId);