shp 생성 컨트롤러 이동, 주석추가

This commit is contained in:
2026-02-27 21:04:18 +09:00
parent 086eb20e8d
commit af2721949c
13 changed files with 181 additions and 96 deletions

47
inference-table-index.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
#############################################
# PostgreSQL INDEX CREATE SCRIPT
# Keeps going even when an individual index fails.
#############################################
# Intentionally NO `set -e`: every CREATE INDEX CONCURRENTLY is
# best-effort, and a failure on one index must not abort the rest.

# ===== Required DB connection environment variables =====
if [ -z "$DB_HOST" ] || [ -z "$DB_PORT" ] || [ -z "$DB_NAME" ] || [ -z "$DB_USER" ]; then
    echo "DB 환경변수가 설정되지 않았습니다."
    exit 1
fi

echo "========================================"
echo "START INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"

# run_index SQL
#   Executes one SQL statement via psql, logging it before/after.
#   Connection parameters are quoted so values containing spaces or
#   glob characters do not get word-split (the original left them bare).
#   psql's exit status is deliberately ignored — see header note.
run_index() {
    echo "----------------------------------------"
    echo "Running: $1"
    psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -c "$1"
    echo "----------------------------------------"
}

# NOTE(review): CONCURRENTLY cannot run inside a transaction block; each
# statement is issued as its own psql -c call, which satisfies that.
run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_uid_ext_state
ON tb_map_sheet_mng_files (hst_uid, file_ext, file_state);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mng_files_hstuid_ext
ON tb_map_sheet_mng_files (hst_uid, file_ext);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx_no_use
ON tb_map_inkx_5k (mapidcd_no, use_inference);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx5k_mapidcd
ON tb_map_inkx_5k (mapidcd_no);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_hst_exec_sheetnum_yyyy_desc
ON tb_map_sheet_mng_hst (map_sheet_num, mng_yyyy DESC);"

echo "========================================"
echo "END INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"
echo "모든 인덱스 시도 완료"

View File

@@ -27,6 +27,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDate;
import java.util.List;
@@ -35,6 +36,7 @@ import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
@@ -373,6 +375,9 @@ public class InferenceResultApiController {
}
Path zipPath = Path.of(path);
if (!Files.isRegularFile(zipPath)) {
return ResponseEntity.status(HttpStatus.NOT_FOUND).body("추론이 완료되지 않아 파일이 생성되지 않았습니다.");
}
return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
}

View File

@@ -1,60 +0,0 @@
package com.kamco.cd.kamcoback.inference;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST controller that creates inference result data and SHP files.
 *
 * <p>NOTE(review): this file is deleted in this commit; equivalent endpoints
 * appear to move to the scheduler controller — confirm against the diff.
 */
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@Log4j2
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/shp")
public class InferenceResultShpApiController {
// Service that performs the actual save / SHP-generation work.
private final InferenceResultShpService inferenceResultShpService;
// Sample mapIds JSON payload kept as a constant (presumably for docs/testing — verify usage).
public static final String MAP_ID =
"{ \"mapIds\": [\"37716096\",\"37716095\",\"37716094\",\"37716091\",\"37716086\",\"37716085\",\"37716084\",\"37716083\",\"37716076\",\"37716066\",\"37716065\",\"37716064\",\"37716063\",\"37716061\",\"37716051\",\"37716011\"] }";
/**
 * Saves inference result data for the given learn id by delegating to
 * {@link InferenceResultShpService#saveInferenceResultData}.
 *
 * @param learnId learn id used to build the inference data
 * @return wrapped count DTO describing how many rows were saved
 */
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "데이터 저장 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/save/{learnId}")
public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
@PathVariable Long learnId) {
return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
}
/**
 * Manually triggers SHP file generation for the inference identified by the given UUID.
 *
 * @param uuid inference UUID whose SHP files should be (re)generated
 * @return empty OK response; generation itself is handled by the service
 */
@Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
@PostMapping("/shp/{uuid}")
public ApiResponseDto<Void> createShp(
@Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae") @PathVariable UUID uuid) {
// Manually trigger SHP file generation.
inferenceResultShpService.createShp(uuid);
return ApiResponseDto.createOK(null);
}
}

View File

@@ -71,14 +71,16 @@ public class InferenceResultShpDto {
@NoArgsConstructor
public static class InferenceCntDto {
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 목록 저장 데이터 건수", example = "120")
@Schema(
description = "추론 결과(inference_results_testing)를 기준으로 데이터 목록 저장 데이터 건수",
example = "120")
int sheetAnalDataCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120")
int inferenceCnt;
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120")
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 Geom 데이터 건수", example = "120")
int inferenceGeomCnt;
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 집계 데이터 건수", example = "120")
int inferenceSttcnt;
}
@Setter

View File

@@ -37,10 +37,16 @@ public class InferenceResultShpService {
@Value("${file.dataset-dir}")
private String datasetDir;
/** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. */
/**
* 추론 결과 inference 테이블 upsert
*
* @param uuid learn uuid
* @return
*/
@Transactional
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(Long id) {
return coreService.buildInferenceData(id);
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(UUID uuid) {
Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(uuid);
return coreService.buildInferenceData(learnId);
}
/**
@@ -55,13 +61,13 @@ public class InferenceResultShpService {
return;
}
String batchId =
String batchIds =
Stream.of(dto.getM1ModelBatchId(), dto.getM2ModelBatchId(), dto.getM3ModelBatchId())
.filter(Objects::nonNull)
.map(String::valueOf)
.collect(Collectors.joining(","));
// shp 파일 비동기 생성
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, dto.getUid());
shpPipelineService.runPipeline(jarPath, datasetDir, batchIds, dto.getUid());
}
}

View File

@@ -271,7 +271,7 @@ public class InferenceResultCoreService {
.getInferenceResultByUuid(request.getUuid())
.orElseThrow(EntityNotFoundException::new);
// M1/M2/M3 영역 업데이트
// G1/G2/G3 영역 업데이트
if (request.getType() != null) {
applyModelUpdate(entity, request);
}

View File

@@ -3,10 +3,12 @@ package com.kamco.cd.kamcoback.postgres.core;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Log4j2
@RequiredArgsConstructor
public class InferenceResultShpCoreService {
@@ -15,15 +17,27 @@ public class InferenceResultShpCoreService {
/**
* inference_results 기준으로 - tb_map_sheet_anal_data_inference -
* tb_map_sheet_anal_data_inference_geom 테이블을 최신 상태로 구성한다.
*
* @param id learn id
* @return
*/
@Transactional
public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) {
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
repo.upsertGroupsFromInferenceResults(analId);
repo.upsertGeomsFromInferenceResults(analId);
repo.upsertSttcFromInferenceResults(analId);
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
int analDataCnt = repo.upsertGroupsFromInferenceResults(analId);
int geomCnt = repo.upsertGeomsFromInferenceResults(analId);
int sttcCnt = repo.upsertSttcFromInferenceResults(analId);
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
cntDto.setSheetAnalDataCnt(analDataCnt);
cntDto.setInferenceGeomCnt(geomCnt);
cntDto.setInferenceSttcnt(sttcCnt);
log.info(
"[ANAL SAVE] analId={}, tb_map_sheet_anal_data_inference={}, tb_map_sheet_anal_data_inference_geom={}, tb_map_sheet_anal_sttc={}",
analId,
analDataCnt,
geomCnt,
sttcCnt);
return cntDto;
}
}

View File

@@ -202,6 +202,33 @@ public class MapSheetLearnEntity {
@Column(name = "chn_dtct_mst_id")
private String chnDtctMstId;
@Column(name = "shp_create_status")
private String shp_create_status;
@Column(name = "shp_create_message")
private String shp_create_message;
@Column(name = "shp_create_status_dttm")
private ZonedDateTime shp_create_status_dttm;
@Column(name = "shp_status")
private String shp_status;
@Column(name = "shp_stage")
private String shp_stage;
@Column(name = "shp_started_dttm")
private ZonedDateTime shp_started_dttm;
@Column(name = "shp_ended_dttm")
private ZonedDateTime shp_ended_dttm;
@Column(name = "shp_last_message")
private String shp_last_message;
@Column(name = "shp_error_message")
private String shp_error_message;
public InferenceResultDto.ResultList toDto() {
return new InferenceResultDto.ResultList(
this.uuid,

View File

@@ -8,11 +8,11 @@ public interface InferenceResultRepositoryCustom {
Long upsertGroupsFromMapSheetAnal(Long id);
void upsertGroupsFromInferenceResults(Long analId);
int upsertGroupsFromInferenceResults(Long analId);
void upsertGeomsFromInferenceResults(Long analId);
int upsertGeomsFromInferenceResults(Long analId);
void upsertSttcFromInferenceResults(Long analId);
int upsertSttcFromInferenceResults(Long analId);
Long getInferenceLearnIdByUuid(UUID uuid);

View File

@@ -80,7 +80,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
* @return 반영된 행 수
*/
@Override
public void upsertGroupsFromInferenceResults(Long analId) {
public int upsertGroupsFromInferenceResults(Long analId) {
String sql =
"""
@@ -124,7 +124,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
updated_dttm = now()
""";
em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
return em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
}
/**
@@ -136,7 +136,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
* @return 반영된 행 수
*/
@Override
public void upsertGeomsFromInferenceResults(Long analUid) {
public int upsertGeomsFromInferenceResults(Long analUid) {
String sql =
"""
@@ -229,11 +229,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
ref_map_sheet_num = EXCLUDED.ref_map_sheet_num
""";
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
}
@Override
public void upsertSttcFromInferenceResults(Long analUid) {
public int upsertSttcFromInferenceResults(Long analUid) {
String sql =
"""
@@ -306,7 +306,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
,updated_uid = EXCLUDED.updated_uid
""";
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
}
// ===============================

View File

@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.scheduler;
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
@@ -11,6 +13,7 @@ import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataLabelJobService;
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataReviewJobService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
@@ -18,6 +21,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
@@ -28,7 +32,7 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
@Tag(name = "스케줄링 수동 호출 테스트", description = "스케줄링 수동 호출 테스트 API")
@Tag(name = "스케줄링 및 jar 수동 호출 테스트", description = "스케줄링 및 jar 수동 호출 테스트 API")
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/schedule")
@@ -42,6 +46,7 @@ public class SchedulerApiController {
private final TrainingDataReviewJobService trainingDataReviewJobService;
private final MemberInactiveJobService memberInactiveJobService;
private final MapSheetMngFileJobController mapSheetMngFileJobController;
private final InferenceResultShpService inferenceResultShpService;
private final GukYuinApiService gukYuinApiService;
@Operation(summary = "국유인 탐지객체 조회 PNU 업데이트 스케줄링", description = "국유인 탐지객체 조회 PNU 업데이트 스케줄링")
@@ -137,4 +142,37 @@ public class SchedulerApiController {
return ApiResponseDto.createOK("OK");
}
/**
 * Saves inference result data for the inference identified by the given UUID.
 * Delegates to {@link InferenceResultShpService#saveInferenceResultData(UUID)}.
 *
 * @param uuid inference UUID
 * @return wrapped count DTO with the number of saved rows
 */
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@ApiResponses(
value = {
@ApiResponse(
responseCode = "201",
description = "데이터 저장 성공",
content =
@Content(
mediaType = "application/json",
schema =
@Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
@ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
@ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
})
@PostMapping("/save/inference/{uuid}")
public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
@Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
@PathVariable
UUID uuid) {
return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(uuid));
}
/**
 * Manually triggers SHP file generation for the inference identified by the given UUID.
 *
 * @param uuid inference UUID
 * @return empty OK response; generation is performed by the service
 */
@Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
@PostMapping("/shp/inference/{uuid}")
public ApiResponseDto<Void> createShp(
@Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
@PathVariable
UUID uuid) {
// Manually trigger SHP file generation.
inferenceResultShpService.createShp(uuid);
return ApiResponseDto.createOK(null);
}
}

View File

@@ -214,7 +214,7 @@ public class MapSheetInferenceJobService {
// 현재 모델 종료 업데이트
updateProcessingEndTimeByModel(job, sheet.getUuid(), now, currentType);
// M3이면 전체 종료
// G3이면 전체 종료
if (ModelType.G3.getId().equals(currentType)) {
endAll(sheet, now);
return;
@@ -235,6 +235,13 @@ public class MapSheetInferenceJobService {
* @param now
*/
private void endAll(InferenceBatchSheet sheet, ZonedDateTime now) {
List<Long> batchIds =
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
.filter(Objects::nonNull)
.distinct()
.toList();
SaveInferenceAiDto save = new SaveInferenceAiDto();
save.setUuid(sheet.getUuid());
save.setStatus(Status.END.getId());
@@ -246,12 +253,6 @@ public class MapSheetInferenceJobService {
inferenceResultCoreService.upsertGeomData(sheet.getId());
// 추론 종료일때 shp 파일 생성
List<Long> batchIds =
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
.filter(Objects::nonNull)
.distinct()
.toList();
String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(","));
// shp 파일 비동기 생성

View File

@@ -1,6 +1,7 @@
package com.kamco.cd.kamcoback.scheduler.service;
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock;
import java.nio.file.Paths;
import lombok.RequiredArgsConstructor;
@@ -13,11 +14,15 @@ import org.springframework.stereotype.Service;
@RequiredArgsConstructor
public class ShpPipelineService {
private final InferenceResultCoreService inferenceResultCoreService;
private final ExternalJarRunner externalJarRunner;
private final ShpKeyLock shpKeyLock;
@Async("shpExecutor")
public void runPipeline(String jarPath, String datasetDir, String batchId, String inferenceId) {
public void runPipeline(String jarPath, String datasetDir, String batchIds, String inferenceId) {
//
// batchIds.split(",")
// inferenceResultCoreService.getInferenceResultCnt();
// inferenceId 기준 동시 실행 제한
if (!shpKeyLock.tryLock(inferenceId)) {
@@ -27,7 +32,7 @@ public class ShpPipelineService {
try {
// uid 기준 merge shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchId, inferenceId, "", "MERGED");
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED");
// uid 기준 shp 파일 geoserver 등록
String register =
@@ -36,7 +41,7 @@ public class ShpPipelineService {
externalJarRunner.run(jarPath, register, inferenceId);
// uid 기준 도엽별 shp, geojson 파일 생성
externalJarRunner.run(jarPath, batchId, inferenceId, "", "RESOLVE");
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
log.info("SHP pipeline finished. inferenceId={}", inferenceId);