Merge pull request 'feat/infer_dev_260211' (#125) from feat/infer_dev_260211 into develop
Reviewed-on: #125
This commit was merged in pull request #125.
This commit is contained in:
47
inference-table-index.sh
Normal file
47
inference-table-index.sh
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/bin/bash
#############################################
# PostgreSQL INDEX CREATE SCRIPT
# Deliberately no `set -e`: each CREATE INDEX is
# attempted independently so one failure does not
# abort the remaining ones.
#############################################

# ===== Required environment variables =====
if [ -z "$DB_HOST" ] || [ -z "$DB_PORT" ] || [ -z "$DB_NAME" ] || [ -z "$DB_USER" ]; then
  echo "DB 환경변수가 설정되지 않았습니다."
  exit 1
fi

echo "========================================"
echo "START INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"

# Run a single SQL statement against the configured database.
# Connection variables are quoted to prevent word-splitting and
# globbing if they ever contain spaces or shell metacharacters.
run_index() {
  echo "----------------------------------------"
  echo "Running: $1"
  psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -c "$1"
  echo "----------------------------------------"
}

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_uid_ext_state
ON tb_map_sheet_mng_files (hst_uid, file_ext, file_state);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mng_files_hstuid_ext
ON tb_map_sheet_mng_files (hst_uid, file_ext);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx_no_use
ON tb_map_inkx_5k (mapidcd_no, use_inference);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_inkx5k_mapidcd
ON tb_map_inkx_5k (mapidcd_no);"

run_index "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_hst_exec_sheetnum_yyyy_desc
ON tb_map_sheet_mng_hst (map_sheet_num, mng_yyyy DESC);"

echo "========================================"
echo "END INDEX CREATE"
echo "TIME: $(date)"
echo "========================================"
echo "모든 인덱스 시도 완료"
|
||||
@@ -562,10 +562,12 @@ public class GukYuinApiService {
|
||||
+ "&yyyymmdd="
|
||||
+ yyyymmdd;
|
||||
|
||||
log.info("##### API 호출 URL : {}", url);
|
||||
ExternalCallResult<ChngDetectMastDto.RlbDtctDto> result =
|
||||
externalHttpClient.call(
|
||||
url, HttpMethod.GET, null, netUtils.jsonHeaders(), ChngDetectMastDto.RlbDtctDto.class);
|
||||
|
||||
log.info("##### API 호출 완료 : {}", result.toString());
|
||||
this.insertGukyuinAuditLog(
|
||||
EventType.LIST.getId(),
|
||||
netUtils.getLocalIP(),
|
||||
@@ -636,4 +638,8 @@ public class GukYuinApiService {
|
||||
/**
 * Returns the object ids for field-survey (실태조사) targets for the given learn uid
 * and map sheet number. Pure delegation to the core service.
 *
 * @param uid learn uid identifying the inference run
 * @param mapSheetNum map sheet number to filter by
 * @return list of matching object ids (contract of the core service — may be empty)
 */
public List<String> findStbltObjectIds(String uid, String mapSheetNum) {
  return gukyuinCoreService.findStbltObjectIds(uid, mapSheetNum);
}

/**
 * Randomly marks inference geometry rows as field-survey-complete for testing.
 * Pure delegation to the core service.
 *
 * @param uid learn uid identifying the inference run
 * @param updateCnt requested number of rows to update
 * @return count reported by the core service
 */
public Integer updateStbltRandomData(String uid, int updateCnt) {
  return gukyuinCoreService.updateStbltRandomData(uid, updateCnt);
}
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
@@ -35,6 +36,7 @@ import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.log4j.Log4j2;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
@@ -373,6 +375,9 @@ public class InferenceResultApiController {
|
||||
}
|
||||
|
||||
Path zipPath = Path.of(path);
|
||||
if (!Files.isRegularFile(zipPath)) {
|
||||
return ResponseEntity.status(HttpStatus.NOT_FOUND).body("추론이 완료되지 않아 파일이 생성되지 않았습니다.");
|
||||
}
|
||||
|
||||
return rangeDownloadResponder.buildZipResponse(zipPath, uid + ".zip", request);
|
||||
}
|
||||
|
||||
@@ -1,60 +0,0 @@
|
||||
package com.kamco.cd.kamcoback.inference;
|
||||
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
|
||||
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponses;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.log4j.Log4j2;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
/**
 * REST controller exposing manual triggers for inference-result data persistence
 * and shp file generation under {@code /api/inference/shp}.
 */
@Tag(name = "추론결과 데이터 생성", description = "추론결과 데이터 생성 API")
@Log4j2
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/inference/shp")
public class InferenceResultShpApiController {

  private final InferenceResultShpService inferenceResultShpService;

  // Hard-coded sample map-sheet-id JSON payload. Not referenced inside this class;
  // NOTE(review): presumably used by external callers or tests — confirm before removing.
  public static final String MAP_ID =
      "{ \"mapIds\": [\"37716096\",\"37716095\",\"37716094\",\"37716091\",\"37716086\",\"37716085\",\"37716084\",\"37716083\",\"37716076\",\"37716066\",\"37716065\",\"37716064\",\"37716063\",\"37716061\",\"37716051\",\"37716011\"] }";

  /**
   * Persists inference result data for the given learn id.
   *
   * @param learnId learn id whose inference results are saved
   * @return save counts wrapped in the standard API envelope
   */
  @Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
  @ApiResponses(
      value = {
        @ApiResponse(
            responseCode = "201",
            description = "데이터 저장 성공",
            content =
                @Content(
                    mediaType = "application/json",
                    schema =
                        @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
        @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
        @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
      })
  @PostMapping("/save/{learnId}")
  public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
      @PathVariable Long learnId) {
    return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(learnId));
  }

  /**
   * Manually triggers shp file generation for the given inference uuid.
   *
   * @param uuid inference run uuid
   * @return empty OK envelope; generation outcome is not reported here
   */
  @Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
  @PostMapping("/shp/{uuid}")
  public ApiResponseDto<Void> createShp(
      @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae") @PathVariable UUID uuid) {
    // Manually generate the shp file
    inferenceResultShpService.createShp(uuid);
    return ApiResponseDto.createOK(null);
  }
}
|
||||
@@ -71,14 +71,16 @@ public class InferenceResultShpDto {
|
||||
@NoArgsConstructor
|
||||
public static class InferenceCntDto {
|
||||
|
||||
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 목록 저장 터이터 건수", example = "120")
|
||||
@Schema(
|
||||
description = "추론 결과(inference_results_testing)를 기준으로 데이터 목록 저장 터이터 건수",
|
||||
example = "120")
|
||||
int sheetAnalDataCnt;
|
||||
|
||||
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 데이터 건수", example = "120")
|
||||
int inferenceCnt;
|
||||
|
||||
@Schema(description = "추론 결과(inference_results)를 기준으로 신규 저장 Geom 데이터 건수", example = "120")
|
||||
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 Geom 데이터 건수", example = "120")
|
||||
int inferenceGeomCnt;
|
||||
|
||||
@Schema(description = "추론 결과(inference_results_testing)를 기준으로 저장 집계 데이터 건수", example = "120")
|
||||
int inferenceSttcnt;
|
||||
}
|
||||
|
||||
@Setter
|
||||
|
||||
@@ -37,10 +37,16 @@ public class InferenceResultShpService {
|
||||
@Value("${file.dataset-dir}")
|
||||
private String datasetDir;
|
||||
|
||||
/** inference_results 테이블을 기준으로 분석 결과 테이블과 도형 테이블을 최신 상태로 반영한다. */
|
||||
/**
|
||||
* 추론 결과 inference 테이블 upsert
|
||||
*
|
||||
* @param uuid learn uuid
|
||||
* @return
|
||||
*/
|
||||
@Transactional
|
||||
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(Long id) {
|
||||
return coreService.buildInferenceData(id);
|
||||
public InferenceResultShpDto.InferenceCntDto saveInferenceResultData(UUID uuid) {
|
||||
Long learnId = inferenceResultCoreService.getInferenceLearnIdByUuid(uuid);
|
||||
return coreService.buildInferenceData(learnId);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -55,13 +61,13 @@ public class InferenceResultShpService {
|
||||
return;
|
||||
}
|
||||
|
||||
String batchId =
|
||||
String batchIds =
|
||||
Stream.of(dto.getM1ModelBatchId(), dto.getM2ModelBatchId(), dto.getM3ModelBatchId())
|
||||
.filter(Objects::nonNull)
|
||||
.map(String::valueOf)
|
||||
.collect(Collectors.joining(","));
|
||||
|
||||
// shp 파일 비동기 생성
|
||||
shpPipelineService.runPipeline(jarPath, datasetDir, batchId, dto.getUid());
|
||||
shpPipelineService.runPipeline(jarPath, datasetDir, batchIds, dto.getUid());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -219,6 +219,9 @@ public class WorkerStatsDto {
|
||||
@Deprecated
|
||||
@Schema(description = "[Deprecated] inspectionRemainingCount 사용 권장")
|
||||
private Long remainingInspectCount;
|
||||
|
||||
@Schema(description = "파일 다운로드 가능한 폴리곤 수")
|
||||
private Long downloadPolygonCnt;
|
||||
}
|
||||
|
||||
@Getter
|
||||
|
||||
@@ -81,4 +81,8 @@ public class GukYuinCoreService {
|
||||
/**
 * Returns field-survey (실태조사) target object ids for the given learn uid and
 * map sheet number. Pure delegation to the repository layer.
 *
 * @param uid learn uid identifying the inference run
 * @param mapSheetNum map sheet number to filter by
 * @return list of matching object ids from the repository
 */
public List<String> findStbltObjectIds(String uid, String mapSheetNum) {
  return gukYuinRepository.findStbltObjectIds(uid, mapSheetNum);
}

/**
 * Randomly marks inference geometry rows as field-survey-complete for testing.
 * Pure delegation to the repository layer.
 *
 * @param uid learn uid identifying the inference run
 * @param updateCnt requested number of rows to update
 * @return count reported by the repository
 */
public Integer updateStbltRandomData(String uid, int updateCnt) {
  return gukYuinRepository.updateStbltRandomData(uid, updateCnt);
}
|
||||
}
|
||||
|
||||
@@ -271,7 +271,7 @@ public class InferenceResultCoreService {
|
||||
.getInferenceResultByUuid(request.getUuid())
|
||||
.orElseThrow(EntityNotFoundException::new);
|
||||
|
||||
// M1/M2/M3 영역 업데이트
|
||||
// G1/G2/G3 영역 업데이트
|
||||
if (request.getType() != null) {
|
||||
applyModelUpdate(entity, request);
|
||||
}
|
||||
|
||||
@@ -3,10 +3,12 @@ package com.kamco.cd.kamcoback.postgres.core;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
|
||||
import com.kamco.cd.kamcoback.postgres.repository.Inference.InferenceResultRepository;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.log4j.Log4j2;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
@Service
|
||||
@Log4j2
|
||||
@RequiredArgsConstructor
|
||||
public class InferenceResultShpCoreService {
|
||||
|
||||
@@ -15,15 +17,27 @@ public class InferenceResultShpCoreService {
|
||||
/**
|
||||
* inference_results 기준으로 - tb_map_sheet_anal_data_inference -
|
||||
* tb_map_sheet_anal_data_inference_geom 테이블을 최신 상태로 구성한다.
|
||||
*
|
||||
* @param id learn id
|
||||
* @return
|
||||
*/
|
||||
@Transactional
|
||||
public InferenceResultShpDto.InferenceCntDto buildInferenceData(Long id) {
|
||||
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
|
||||
repo.upsertGroupsFromInferenceResults(analId);
|
||||
repo.upsertGeomsFromInferenceResults(analId);
|
||||
repo.upsertSttcFromInferenceResults(analId);
|
||||
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
|
||||
|
||||
Long analId = repo.upsertGroupsFromMapSheetAnal(id);
|
||||
int analDataCnt = repo.upsertGroupsFromInferenceResults(analId);
|
||||
int geomCnt = repo.upsertGeomsFromInferenceResults(analId);
|
||||
int sttcCnt = repo.upsertSttcFromInferenceResults(analId);
|
||||
InferenceResultShpDto.InferenceCntDto cntDto = new InferenceResultShpDto.InferenceCntDto();
|
||||
cntDto.setSheetAnalDataCnt(analDataCnt);
|
||||
cntDto.setInferenceGeomCnt(geomCnt);
|
||||
cntDto.setInferenceSttcnt(sttcCnt);
|
||||
log.info(
|
||||
"[ANAL SAVE] analId={}, tb_map_sheet_anal_data_inference={}, tb_map_sheet_anal_data_inference_geom={}, tb_map_sheet_anal_sttc={}",
|
||||
analId,
|
||||
analDataCnt,
|
||||
geomCnt,
|
||||
sttcCnt);
|
||||
return cntDto;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -202,6 +202,33 @@ public class MapSheetLearnEntity {
|
||||
@Column(name = "chn_dtct_mst_id")
|
||||
private String chnDtctMstId;
|
||||
|
||||
@Column(name = "shp_create_status")
|
||||
private String shp_create_status;
|
||||
|
||||
@Column(name = "shp_create_message")
|
||||
private String shp_create_message;
|
||||
|
||||
@Column(name = "shp_create_status_dttm")
|
||||
private ZonedDateTime shp_create_status_dttm;
|
||||
|
||||
@Column(name = "shp_status")
|
||||
private String shp_status;
|
||||
|
||||
@Column(name = "shp_stage")
|
||||
private String shp_stage;
|
||||
|
||||
@Column(name = "shp_started_dttm")
|
||||
private ZonedDateTime shp_started_dttm;
|
||||
|
||||
@Column(name = "shp_ended_dttm")
|
||||
private ZonedDateTime shp_ended_dttm;
|
||||
|
||||
@Column(name = "shp_last_message")
|
||||
private String shp_last_message;
|
||||
|
||||
@Column(name = "shp_error_message")
|
||||
private String shp_error_message;
|
||||
|
||||
public InferenceResultDto.ResultList toDto() {
|
||||
return new InferenceResultDto.ResultList(
|
||||
this.uuid,
|
||||
|
||||
@@ -8,11 +8,11 @@ public interface InferenceResultRepositoryCustom {
|
||||
|
||||
Long upsertGroupsFromMapSheetAnal(Long id);
|
||||
|
||||
void upsertGroupsFromInferenceResults(Long analId);
|
||||
int upsertGroupsFromInferenceResults(Long analId);
|
||||
|
||||
void upsertGeomsFromInferenceResults(Long analId);
|
||||
int upsertGeomsFromInferenceResults(Long analId);
|
||||
|
||||
void upsertSttcFromInferenceResults(Long analId);
|
||||
int upsertSttcFromInferenceResults(Long analId);
|
||||
|
||||
Long getInferenceLearnIdByUuid(UUID uuid);
|
||||
|
||||
|
||||
@@ -80,7 +80,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
|
||||
* @return 반영된 행 수
|
||||
*/
|
||||
@Override
|
||||
public void upsertGroupsFromInferenceResults(Long analId) {
|
||||
public int upsertGroupsFromInferenceResults(Long analId) {
|
||||
|
||||
String sql =
|
||||
"""
|
||||
@@ -124,7 +124,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
|
||||
updated_dttm = now()
|
||||
""";
|
||||
|
||||
em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
|
||||
return em.createNativeQuery(sql).setParameter("analId", analId).executeUpdate();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -136,7 +136,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
|
||||
* @return 반영된 행 수
|
||||
*/
|
||||
@Override
|
||||
public void upsertGeomsFromInferenceResults(Long analUid) {
|
||||
public int upsertGeomsFromInferenceResults(Long analUid) {
|
||||
|
||||
String sql =
|
||||
"""
|
||||
@@ -229,11 +229,11 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
|
||||
ref_map_sheet_num = EXCLUDED.ref_map_sheet_num
|
||||
""";
|
||||
|
||||
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
|
||||
return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void upsertSttcFromInferenceResults(Long analUid) {
|
||||
public int upsertSttcFromInferenceResults(Long analUid) {
|
||||
|
||||
String sql =
|
||||
"""
|
||||
@@ -306,7 +306,7 @@ public class InferenceResultRepositoryImpl implements InferenceResultRepositoryC
|
||||
,updated_uid = EXCLUDED.updated_uid
|
||||
""";
|
||||
|
||||
em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
|
||||
return em.createNativeQuery(sql).setParameter("analUid", analUid).executeUpdate();
|
||||
}
|
||||
|
||||
// ===============================
|
||||
|
||||
@@ -47,4 +47,6 @@ public interface GukYuinRepositoryCustom {
|
||||
void updateMapSheetInferenceLabelEndStatus(Long learnId);
|
||||
|
||||
List<String> findStbltObjectIds(String uid, String mapSheetNum);
|
||||
|
||||
Integer updateStbltRandomData(String uid, int updateCnt);
|
||||
}
|
||||
|
||||
@@ -336,6 +336,44 @@ public class GukYuinRepositoryImpl implements GukYuinRepositoryCustom {
|
||||
.fetch();
|
||||
}
|
||||
|
||||
/**
|
||||
* mapSheetAnalDataInferenceGeomEntity 데이터 에 실태조사 값 들어온 것으로 간주하고 update 랜덤으로 하기
|
||||
*
|
||||
* @param uid
|
||||
* @param updateCnt
|
||||
* @return
|
||||
*/
|
||||
@Override
|
||||
public Integer updateStbltRandomData(String uid, int updateCnt) {
|
||||
List<Long> geoUids =
|
||||
queryFactory
|
||||
.select(mapSheetAnalDataInferenceGeomEntity.geoUid)
|
||||
.from(mapSheetLearnEntity)
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(mapSheetLearnEntity.id.eq(mapSheetAnalInferenceEntity.learnId))
|
||||
.innerJoin(mapSheetAnalDataInferenceEntity)
|
||||
.on(mapSheetAnalInferenceEntity.id.eq(mapSheetAnalDataInferenceEntity.analUid))
|
||||
.innerJoin(mapSheetAnalDataInferenceGeomEntity)
|
||||
.on(
|
||||
mapSheetAnalDataInferenceEntity.id.eq(mapSheetAnalDataInferenceGeomEntity.dataUid),
|
||||
mapSheetAnalDataInferenceGeomEntity.fitState.isNull())
|
||||
.where(mapSheetLearnEntity.uid.eq(uid))
|
||||
.orderBy(mapSheetAnalDataInferenceGeomEntity.geoUid.asc())
|
||||
.limit(updateCnt)
|
||||
.fetch();
|
||||
|
||||
for (Long geoUid : geoUids) {
|
||||
queryFactory
|
||||
.update(mapSheetAnalDataInferenceGeomEntity)
|
||||
.set(mapSheetAnalDataInferenceGeomEntity.pnu, 1L)
|
||||
.set(mapSheetAnalDataInferenceGeomEntity.fitState, "Y")
|
||||
.set(mapSheetAnalDataInferenceGeomEntity.fitStateDttm, ZonedDateTime.now())
|
||||
.where(mapSheetAnalDataInferenceGeomEntity.geoUid.eq(geoUid))
|
||||
.execute();
|
||||
}
|
||||
return updateCnt;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public void updateGukYuinApplyStateComplete(Long id, GukYuinStatus status) {
|
||||
|
||||
@@ -6,6 +6,7 @@ import static com.kamco.cd.kamcoback.postgres.entity.QLabelingLabelerEntity.labe
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceEntity.mapSheetAnalDataInferenceEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalDataInferenceGeomEntity.mapSheetAnalDataInferenceGeomEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetAnalInferenceEntity.mapSheetAnalInferenceEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnDataGeomEntity.mapSheetLearnDataGeomEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMapSheetLearnEntity.mapSheetLearnEntity;
|
||||
import static com.kamco.cd.kamcoback.postgres.entity.QMemberEntity.memberEntity;
|
||||
|
||||
@@ -388,12 +389,8 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
mapSheetAnalDataInferenceGeomEntity.compareYyyy.eq(analEntity.getCompareYyyy()),
|
||||
mapSheetAnalDataInferenceGeomEntity.targetYyyy.eq(analEntity.getTargetYyyy()),
|
||||
mapSheetAnalDataInferenceGeomEntity.stage.eq(analEntity.getStage()),
|
||||
// mapSheetAnalDataInferenceGeomEntity.pnu.isNotNull()
|
||||
mapSheetAnalDataInferenceGeomEntity.pnu.gt(0L),
|
||||
mapSheetAnalDataInferenceGeomEntity.fitState.eq(
|
||||
ImageryFitStatus.UNFIT.getId()) // TODO:
|
||||
// 추후 라벨링 대상 조건 수정하기
|
||||
)
|
||||
mapSheetAnalDataInferenceGeomEntity.fitState.eq(ImageryFitStatus.UNFIT.getId()))
|
||||
.fetchOne();
|
||||
}
|
||||
|
||||
@@ -493,6 +490,19 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
inspectionStatus = inspectionRemaining > 0 ? "진행중" : "완료";
|
||||
}
|
||||
|
||||
Long downloadPolygonCnt =
|
||||
queryFactory
|
||||
.select(mapSheetLearnDataGeomEntity.geoUid.count())
|
||||
.from(mapSheetLearnDataGeomEntity)
|
||||
.innerJoin(labelingAssignmentEntity)
|
||||
.on(labelingAssignmentEntity.inferenceGeomUid.eq(mapSheetLearnDataGeomEntity.geoUid))
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(
|
||||
labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
|
||||
mapSheetAnalInferenceEntity.id.eq(analUid))
|
||||
.where(mapSheetLearnDataGeomEntity.fileCreateYn.isTrue())
|
||||
.fetchOne();
|
||||
|
||||
return WorkProgressInfo.builder()
|
||||
// 라벨링 (pass_yn = false인 부적합 데이터 기준)
|
||||
.labelingProgressRate(labelingRate)
|
||||
@@ -516,6 +526,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
.completedCount(labelCompleted)
|
||||
.remainingLabelCount(labelingRemaining)
|
||||
.remainingInspectCount(inspectionRemaining)
|
||||
.downloadPolygonCnt(downloadPolygonCnt)
|
||||
.build();
|
||||
}
|
||||
|
||||
@@ -659,6 +670,19 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
inspectionStatus = inspectionRemaining > 0 ? "진행중" : "완료";
|
||||
}
|
||||
|
||||
Long downloadPolygonCnt =
|
||||
queryFactory
|
||||
.select(mapSheetLearnDataGeomEntity.geoUid.count())
|
||||
.from(mapSheetLearnDataGeomEntity)
|
||||
.innerJoin(labelingAssignmentEntity)
|
||||
.on(labelingAssignmentEntity.inferenceGeomUid.eq(mapSheetLearnDataGeomEntity.geoUid))
|
||||
.innerJoin(mapSheetAnalInferenceEntity)
|
||||
.on(
|
||||
labelingAssignmentEntity.analUid.eq(mapSheetAnalInferenceEntity.id),
|
||||
mapSheetAnalInferenceEntity.uuid.eq(targetUuid))
|
||||
.where(mapSheetLearnDataGeomEntity.fileCreateYn.isTrue())
|
||||
.fetchOne();
|
||||
|
||||
return WorkProgressInfo.builder()
|
||||
.labelingProgressRate(labelingRate)
|
||||
.labelingStatus(labelingStatus)
|
||||
@@ -679,6 +703,7 @@ public class LabelAllocateRepositoryImpl implements LabelAllocateRepositoryCusto
|
||||
.completedCount(labelCompleted)
|
||||
.remainingLabelCount(labelingRemaining)
|
||||
.remainingInspectCount(inspectionRemaining)
|
||||
.downloadPolygonCnt(downloadPolygonCnt)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,8 @@ package com.kamco.cd.kamcoback.scheduler;
|
||||
import com.kamco.cd.kamcoback.code.dto.CommonCodeDto;
|
||||
import com.kamco.cd.kamcoback.config.api.ApiResponseDto;
|
||||
import com.kamco.cd.kamcoback.gukyuin.service.GukYuinApiService;
|
||||
import com.kamco.cd.kamcoback.inference.dto.InferenceResultShpDto;
|
||||
import com.kamco.cd.kamcoback.inference.service.InferenceResultShpService;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiLabelJobService;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiPnuJobService;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.GukYuinApiStatusJobService;
|
||||
@@ -11,6 +13,7 @@ import com.kamco.cd.kamcoback.scheduler.service.MemberInactiveJobService;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataLabelJobService;
|
||||
import com.kamco.cd.kamcoback.scheduler.service.TrainingDataReviewJobService;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
@@ -18,6 +21,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
|
||||
import io.swagger.v3.oas.annotations.tags.Tag;
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
@@ -28,7 +32,7 @@ import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
@Tag(name = "스케줄링 수동 호출 테스트", description = "스케줄링 수동 호출 테스트 API")
|
||||
@Tag(name = "스케줄링 및 jar 수동 호출 테스트", description = "스케줄링 및 jar 수동 호출 테스트 API")
|
||||
@RestController
|
||||
@RequiredArgsConstructor
|
||||
@RequestMapping("/api/schedule")
|
||||
@@ -42,6 +46,7 @@ public class SchedulerApiController {
|
||||
private final TrainingDataReviewJobService trainingDataReviewJobService;
|
||||
private final MemberInactiveJobService memberInactiveJobService;
|
||||
private final MapSheetMngFileJobController mapSheetMngFileJobController;
|
||||
private final InferenceResultShpService inferenceResultShpService;
|
||||
private final GukYuinApiService gukYuinApiService;
|
||||
|
||||
@Operation(summary = "국유인 탐지객체 조회 PNU 업데이트 스케줄링", description = "국유인 탐지객체 조회 PNU 업데이트 스케줄링")
|
||||
@@ -137,4 +142,44 @@ public class SchedulerApiController {
|
||||
|
||||
return ApiResponseDto.createOK("OK");
|
||||
}
|
||||
|
||||
/**
 * Manually persists inference result data for the given inference uuid.
 *
 * @param uuid inference run uuid
 * @return save counts wrapped in the standard API envelope
 */
@Operation(summary = "추론결과 데이터 저장", description = "추론결과 데이터 저장")
@ApiResponses(
    value = {
      @ApiResponse(
          responseCode = "201",
          description = "데이터 저장 성공",
          content =
              @Content(
                  mediaType = "application/json",
                  schema =
                      @Schema(implementation = InferenceResultShpDto.InferenceCntDto.class))),
      @ApiResponse(responseCode = "400", description = "잘못된 검색 조건", content = @Content),
      @ApiResponse(responseCode = "500", description = "서버 오류", content = @Content)
    })
@PostMapping("/save/inference/{uuid}")
public ApiResponseDto<InferenceResultShpDto.InferenceCntDto> saveInferenceData(
    @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
        @PathVariable
        UUID uuid) {
  return ApiResponseDto.createOK(inferenceResultShpService.saveInferenceResultData(uuid));
}

/**
 * Manually triggers shp file generation for the given inference uuid.
 *
 * @param uuid inference run uuid
 * @return empty OK envelope; generation outcome is not reported here
 */
@Operation(summary = "추론결과 shp 생성", description = "추론결과 shp 생성")
@PostMapping("/shp/inference/{uuid}")
public ApiResponseDto<Void> createShp(
    @Parameter(example = "feb2ec0b-a0f7-49ca-95e4-98b2231bdaae", description = "추론 uuid")
        @PathVariable
        UUID uuid) {
  // Manually generate the shp file
  inferenceResultShpService.createShp(uuid);
  return ApiResponseDto.createOK(null);
}

/**
 * Test endpoint: randomly marks inference geometry rows as field-survey-complete.
 *
 * @param uid learn uid identifying the inference run
 * @param updateCnt requested number of rows to update
 * @return count reported by the service layer
 */
@Operation(summary = "국유인 실태조사 적합여부 랜덤 업데이트", description = "국유인 실태조사 적합여부 랜덤 업데이트")
@PutMapping("/gukyuin/random-stblt-update/{uid}/{updateCnt}")
public ApiResponseDto<Integer> updateStbltRandomData(
    @PathVariable String uid, @PathVariable int updateCnt) {
  return ApiResponseDto.ok(gukYuinApiService.updateStbltRandomData(uid, updateCnt));
}
|
||||
}
|
||||
|
||||
@@ -38,24 +38,26 @@ public class GukYuinApiStbltJobService {
|
||||
return "local".equalsIgnoreCase(profile);
|
||||
}
|
||||
|
||||
// @Scheduled(cron = "0 0 3 * * *")
|
||||
public void runTask() {
|
||||
findGukYuinEligibleForSurvey(null);
|
||||
}
|
||||
|
||||
/** 국유인 연동 후, 실태조사 적합여부 확인하여 update */
|
||||
public void findGukYuinEligibleForSurvey(LocalDate baseDate) {
|
||||
// if (isLocalProfile()) {
|
||||
// return;
|
||||
// }
|
||||
|
||||
log.info("[Step 1-1] 국유인 연동 PNU 완료된 추론 회차 정보 가져오기 ");
|
||||
log.info(" learn 테이블의 apply_status : {}", GukYuinStatus.PNU_COMPLETED.getId());
|
||||
List<LearnKeyDto> list =
|
||||
gukYuinStbltJobCoreService.findGukYuinEligibleForSurveyList(
|
||||
GukYuinStatus.PNU_COMPLETED.getId());
|
||||
|
||||
log.info("[Step 1-2] 국유인 연동 PNU 완료된 추론 회차 갯수 : {}", list.size());
|
||||
if (list.isEmpty()) {
|
||||
log.info("[Step 1-3] 국유인 연동 PNU 완료된 추론 회차 갯수 없어서 return");
|
||||
return;
|
||||
}
|
||||
|
||||
log.info("[Step 2-1] 추론 회차 list 로 for문 실행하기 ");
|
||||
for (LearnKeyDto dto : list) {
|
||||
try {
|
||||
String targetDate =
|
||||
@@ -63,19 +65,30 @@ public class GukYuinApiStbltJobService {
|
||||
.minusDays(1)
|
||||
.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
|
||||
|
||||
log.info("[Step 2-2] 실태조사 적합여부 조회 날짜 확인 : {}", targetDate);
|
||||
if (baseDate != null) { // 파라미터가 있으면
|
||||
targetDate = baseDate.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
|
||||
log.info("[Step 2-3] 수동호출 baseDate 가 있을 경우, 실태조사 적합여부 조회 날짜 확인 : {}", targetDate);
|
||||
}
|
||||
|
||||
log.info("[Step 3-1] 국유인 실태조사 적합여부 API 호출 시작 ");
|
||||
log.info(" === 값 확인 - uid : {}", dto.getUid());
|
||||
log.info(" === 값 확인 - targetDate : {}", targetDate);
|
||||
RlbDtctDto result = gukYuinApiService.findRlbDtctList(dto.getUid(), targetDate, "Y");
|
||||
|
||||
if (result == null || result.getResult() == null || result.getResult().isEmpty()) {
|
||||
log.warn("[GUKYUIN] empty result chnDtctMstId={}", dto.getChnDtctMstId());
|
||||
log.info("[GUKYUIN] empty result chnDtctId={}", dto.getUid());
|
||||
log.info("=== 국유인 API 조회 결과 없어서 continue");
|
||||
continue;
|
||||
}
|
||||
|
||||
log.info("[Step 4-1] 국유인 실태조사 적합여부 result 값으로 데이터 업데이트");
|
||||
log.info(" === 데이터 갯수 : {}", result.getResult().size());
|
||||
|
||||
for (RlbDtctMastDto stbltDto : result.getResult()) {
|
||||
log.info("[Step 4-2] 국유인 실태조사 적합여부 결과 가져오기");
|
||||
String resultUid = stbltDto.getChnDtctObjtId();
|
||||
log.info(" == 테이블 tb_pnu 에 적합여부 리턴 결과를 upsert 진행, 객체 uid : {}", resultUid);
|
||||
gukYuinStbltJobCoreService.updateGukYuinEligibleForSurvey(resultUid, stbltDto);
|
||||
}
|
||||
|
||||
@@ -90,6 +103,7 @@ public class GukYuinApiStbltJobService {
|
||||
e -> {
|
||||
List<RlbDtctMastDto> pnuList = e.getValue();
|
||||
|
||||
log.info("[Step 4-3] 국유인 실태조사 적합여부 업데이트 값을 객체 uid 기준으로 DTO 생성");
|
||||
boolean hasY = pnuList.stream().anyMatch(v -> "Y".equals(v.getStbltYn()));
|
||||
|
||||
String fitYn = hasY ? "Y" : "N";
|
||||
@@ -105,7 +119,9 @@ public class GukYuinApiStbltJobService {
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
|
||||
log.info(" === selected DTO : {}", selected);
|
||||
if (selected == null) {
|
||||
log.info(" === selected NULL");
|
||||
return null; // 방어 코드
|
||||
}
|
||||
|
||||
@@ -113,10 +129,11 @@ public class GukYuinApiStbltJobService {
|
||||
fitYn, selected.getIncyCd(), selected.getIncyRsnCont());
|
||||
}));
|
||||
|
||||
log.info("[Step 4-4] 국유인 실태조사 적합여부, 사유, 내용을 inference_geom 테이블에 update");
|
||||
resultMap.forEach(gukYuinStbltJobCoreService::updateGukYuinObjectStbltYn);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("[GUKYUIN] failed uid={}", dto.getChnDtctMstId(), e);
|
||||
log.error("[GUKYUIN] failed uid={}", dto.getUid(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -214,7 +214,7 @@ public class MapSheetInferenceJobService {
|
||||
// 현재 모델 종료 업데이트
|
||||
updateProcessingEndTimeByModel(job, sheet.getUuid(), now, currentType);
|
||||
|
||||
// M3이면 전체 종료
|
||||
// G3이면 전체 종료
|
||||
if (ModelType.G3.getId().equals(currentType)) {
|
||||
endAll(sheet, now);
|
||||
return;
|
||||
@@ -235,6 +235,13 @@ public class MapSheetInferenceJobService {
|
||||
* @param now
|
||||
*/
|
||||
private void endAll(InferenceBatchSheet sheet, ZonedDateTime now) {
|
||||
|
||||
List<Long> batchIds =
|
||||
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
SaveInferenceAiDto save = new SaveInferenceAiDto();
|
||||
save.setUuid(sheet.getUuid());
|
||||
save.setStatus(Status.END.getId());
|
||||
@@ -246,12 +253,6 @@ public class MapSheetInferenceJobService {
|
||||
inferenceResultCoreService.upsertGeomData(sheet.getId());
|
||||
|
||||
// 추론 종료일때 shp 파일 생성
|
||||
List<Long> batchIds =
|
||||
Stream.of(sheet.getM1BatchId(), sheet.getM2BatchId(), sheet.getM3BatchId())
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
String batchIdStr = batchIds.stream().map(String::valueOf).collect(Collectors.joining(","));
|
||||
|
||||
// shp 파일 비동기 생성
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.kamco.cd.kamcoback.scheduler.service;
|
||||
|
||||
import com.kamco.cd.kamcoback.common.service.ExternalJarRunner;
|
||||
import com.kamco.cd.kamcoback.postgres.core.InferenceResultCoreService;
|
||||
import com.kamco.cd.kamcoback.scheduler.config.ShpKeyLock;
|
||||
import java.nio.file.Paths;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -13,11 +14,15 @@ import org.springframework.stereotype.Service;
|
||||
@RequiredArgsConstructor
|
||||
public class ShpPipelineService {
|
||||
|
||||
private final InferenceResultCoreService inferenceResultCoreService;
|
||||
private final ExternalJarRunner externalJarRunner;
|
||||
private final ShpKeyLock shpKeyLock;
|
||||
|
||||
@Async("shpExecutor")
|
||||
public void runPipeline(String jarPath, String datasetDir, String batchId, String inferenceId) {
|
||||
public void runPipeline(String jarPath, String datasetDir, String batchIds, String inferenceId) {
|
||||
//
|
||||
// batchIds.split(",")
|
||||
// inferenceResultCoreService.getInferenceResultCnt();
|
||||
|
||||
// inferenceId 기준 동시 실행 제한
|
||||
if (!shpKeyLock.tryLock(inferenceId)) {
|
||||
@@ -27,7 +32,7 @@ public class ShpPipelineService {
|
||||
|
||||
try {
|
||||
// uid 기준 merge shp, geojson 파일 생성
|
||||
externalJarRunner.run(jarPath, batchId, inferenceId, "", "MERGED");
|
||||
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "MERGED");
|
||||
|
||||
// uid 기준 shp 파일 geoserver 등록
|
||||
String register =
|
||||
@@ -36,7 +41,7 @@ public class ShpPipelineService {
|
||||
externalJarRunner.run(jarPath, register, inferenceId);
|
||||
|
||||
// uid 기준 도엽별 shp, geojson 파일 생성
|
||||
externalJarRunner.run(jarPath, batchId, inferenceId, "", "RESOLVE");
|
||||
externalJarRunner.run(jarPath, batchIds, inferenceId, "", "RESOLVE");
|
||||
|
||||
log.info("SHP pipeline finished. inferenceId={}", inferenceId);
|
||||
|
||||
|
||||
Reference in New Issue
Block a user